3033 | 3033 |         "supports_tool_choice": true,
3034 | 3034 |         "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438"
3035 | 3035 |     },
     | 3036 | +   "azure_ai/deepseek-v3-0324": {
     | 3037 | +       "max_tokens": 8192,
     | 3038 | +       "max_input_tokens": 128000,
     | 3039 | +       "max_output_tokens": 8192,
     | 3040 | +       "input_cost_per_token": 0.00000114,
     | 3041 | +       "output_cost_per_token": 0.00000456,
     | 3042 | +       "litellm_provider": "azure_ai",
     | 3043 | +       "mode": "chat",
     | 3044 | +       "supports_function_calling": true,
     | 3045 | +       "supports_tool_choice": true,
     | 3046 | +       "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438"
     | 3047 | +   },
3036 | 3048 |     "azure_ai/jamba-instruct": {
3037 | 3049 |         "max_tokens": 4096,
3038 | 3050 |         "max_input_tokens": 70000,
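For context, entries in this map are what LiteLLM consults for context-window limits and per-token pricing. A minimal sketch of how the new entry could be read back, assuming a litellm install that already ships this revision of the cost map and that litellm.get_model_info returns the same field names used in the JSON above:

import litellm

# Look up the entry added in this diff; field names mirror the JSON keys.
info = litellm.get_model_info("azure_ai/deepseek-v3-0324")

print(info["max_input_tokens"])          # 128000
print(info["input_cost_per_token"])      # 1.14e-06
print(info["output_cost_per_token"])     # 4.56e-06
print(info["supports_function_calling"]) # True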
3149 | 3161 |         "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview",
3150 | 3162 |         "supports_tool_choice": true
3151 | 3163 |     },
     | 3164 | +   "azure_ai/Llama-4-Scout-17B-16E-Instruct": {
     | 3165 | +       "max_tokens": 16384,
     | 3166 | +       "max_input_tokens": 10000000,
     | 3167 | +       "max_output_tokens": 16384,
     | 3168 | +       "input_cost_per_token": 0.0000002,
     | 3169 | +       "output_cost_per_token": 0.00000078,
     | 3170 | +       "litellm_provider": "azure_ai",
     | 3171 | +       "supports_function_calling": true,
     | 3172 | +       "supports_vision": true,
     | 3173 | +       "mode": "chat",
     | 3174 | +       "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/",
     | 3175 | +       "supports_tool_choice": true
     | 3176 | +   },
     | 3177 | +   "azure_ai/Llama-4-Maverick-17B-128E-Instruct-FP8": {
     | 3178 | +       "max_tokens": 16384,
     | 3179 | +       "max_input_tokens": 1000000,
     | 3180 | +       "max_output_tokens": 16384,
     | 3181 | +       "input_cost_per_token": 0.00000141,
     | 3182 | +       "output_cost_per_token": 0.00000035,
     | 3183 | +       "litellm_provider": "azure_ai",
     | 3184 | +       "supports_function_calling": true,
     | 3185 | +       "supports_vision": true,
     | 3186 | +       "mode": "chat",
     | 3187 | +       "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/",
     | 3188 | +       "supports_tool_choice": true
     | 3189 | +   },
3152 | 3190 |     "azure_ai/Llama-3.2-90B-Vision-Instruct": {
3153 | 3191 |         "max_tokens": 2048,
3154 | 3192 |         "max_input_tokens": 128000,
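Once merged, these entries let the Llama 4 deployments be called through the azure_ai route with automatic cost tracking. A minimal sketch, assuming an Azure AI Foundry serverless deployment of the Scout model; the api_base and api_key values are placeholders, not real endpoints:

import litellm

# Placeholder endpoint and key for an Azure AI Foundry deployment.
response = litellm.completion(
    model="azure_ai/Llama-4-Scout-17B-16E-Instruct",
    messages=[{"role": "user", "content": "One sentence on Azure AI Foundry."}],
    api_base="https://<your-resource>.services.ai.azure.com",  # placeholder
    api_key="<your-azure-ai-api-key>",                         # placeholder
)

# completion_cost() multiplies the reported token usage by the
# input/output per-token prices defined in the cost-map entry above.
print(litellm.completion_cost(completion_response=response))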