@@ -4,7 +4,6 @@
 
 from eureka_ml_insights.models import (
     AzureOpenAIO1Model,
-    AzureOpenAIModel,
     ClaudeModel,
     DirectOpenAIModel,
     DirectOpenAIO1Model,
@@ -14,7 +13,7 @@
     LLaVAModel,
     MistralServerlessAzureRestEndpointModel,
     RestEndpointModel,
-    #TnRModels,
+    TestModel,
 )
 
 from .config import ModelConfig
@@ -23,22 +22,17 @@
 # in the secret_key_params dictionary. OR you can provide the key name and key vault URL to fetch the key from Azure Key Vault.
 # You don't need to provide both the key_vault_url and local_keys_path. You can provide one of them based on your setup.
 
+
+# Test model
+TEST_MODEL_CONFIG = ModelConfig(TestModel, {})
+
 # OpenAI models
 
-'''
 OPENAI_SECRET_KEY_PARAMS = {
     "key_name": "your_openai_secret_key_name",
     "local_keys_path": "keys/keys.json",
     "key_vault_url": None,
 }
-'''
-
-OPENAI_SECRET_KEY_PARAMS = {
-    "key_name": "openai",
-    "local_keys_path": "keys/aifeval-vault-azure-net.json",
-    "key_vault_url": "https://aifeval.vault.azure.net",
-}
-
 
 OAI_O1_PREVIEW_CONFIG = ModelConfig(
     DirectOpenAIO1Model,
@@ -48,14 +42,6 @@
     },
 )
 
-OAI_O1_MINI_CONFIG = ModelConfig(
-    DirectOpenAIO1Model,
-    {
-        "model_name": "o1-mini-2024-09-12",
-        "secret_key_params": OPENAI_SECRET_KEY_PARAMS,
-    },
-)
-
 OAI_O1_PREVIEW_AUZRE_CONFIG = ModelConfig(
     AzureOpenAIO1Model,
     {
@@ -105,110 +91,12 @@
     },
 )
 
-# Azure OAI models
-## Azure OAI models -- TNR Models
-
-TNR_SECRET_KEY_PARAMS = {
-    "key_name": "tnrllmproxy",
-    "local_keys_path": "keys/aifeval-vault-azure-net.json",
-    "key_vault_url": "https://aifeval.vault.azure.net",
-}
-
-GCRAOAI8SW1_AZURE_OAI_O1_PREVIEW_CONFIG = ModelConfig(
-    AzureOpenAIO1Model,
-    {
-        "url": "https://gcraoai8sw1.openai.azure.com/",
-        "model_name": "o1-preview",
-        "api_version": "2024-08-01-preview",
-    }
-)
-
-GCRAOAI8SW1_AZURE_OAI_O1_MINI_CONFIG = ModelConfig(
-    AzureOpenAIO1Model,
-    {
-        "url": "https://gcraoai8sw1.openai.azure.com/",
-        "model_name": "o1-mini",
-        "api_version": "2024-08-01-preview",
-    }
-)
-
-GCRAOAI8SW1_AZURE_OAI_GPT4O_CONFIG = ModelConfig(
-    AzureOpenAIO1Model,
-    {
-        "url": "https://gcraoai8sw1.openai.azure.com/",
-        "model_name": "gpt-4o",
-        "api_version": "2024-08-01-preview",
-        "temperature": 1.0,
-
-    }
-)
-
-
-GCRAOAI8SW1_AZURE_OAI_GPT4_T1_CONFIG = ModelConfig(
-    AzureOpenAIO1Model,
-    {
-        "url": "https://gcraoai8sw1.openai.azure.com/",
-        "model_name": "gpt-4",
-        "api_version": "2024-08-01-preview",
-        "temperature": 1.0,
-
-    }
-)
-
-AzureOpenAIModel
-
-"""
-TNR_GPT4_1106_PREVIEW_CONFIG = ModelConfig(
-    TnRModels,
-    {
-        "url": "https://trapi.research.microsoft.com/gcr/shared/nj/",
-        "secret_key_params": TNR_SECRET_KEY_PARAMS,
-        "model_name": "gpt-4",
-    },
-)
-
-TNR_GPT4_VISION_PREVIEW_CONFIG = ModelConfig(
-    TnRModels,
-    {
-        "url": "https://trapi.research.microsoft.com/gcr/shared/nj/",
-        "secret_key_params": TNR_SECRET_KEY_PARAMS,
-        "model_name": "gpt-4-turbo-v",
-    },
-)
-
-TNR_GPT4V_TURBO_2024_04_09_CONFIG = ModelConfig(
-    TnRModels,
-    {
-        "url": "https://trapi.research.microsoft.com/gcr/shared/nj/",
-        "secret_key_params": TNR_SECRET_KEY_PARAMS,
-        "model_name": "gpt-4-turbo",
-    },
-)
-
-TNR_GPT4O_2024_05_13_CONFIG = ModelConfig(
-    TnRModels,
-    {
-        "url": "https://trapi.research.microsoft.com/gcr/shared/nj/",
-        "secret_key_params": TNR_SECRET_KEY_PARAMS,
-        "model_name": "gpt-4o",
-    },
-)
-"""
-
 # Gemini models
-'''
 GEMINI_SECRET_KEY_PARAMS = {
     "key_name": "your_gemini_secret_key_name",
     "local_keys_path": "keys/keys.json",
     "key_vault_url": None,
 }
-'''
-
-GEMINI_SECRET_KEY_PARAMS = {
-    "key_name": "aif-eval-gemini-firstproject",
-    "local_keys_path": "keys/aifeval-vault-azure-net.json",
-    "key_vault_url": "https://aifeval.vault.azure.net",
-}
 
 GEMINI_V15_PRO_CONFIG = ModelConfig(
     GeminiModel,
@@ -218,37 +106,6 @@
     },
 )
 
-
-GEMINI_V15_PRO_T1_CONFIG = ModelConfig(
-    GeminiModel,
-    {
-        "model_name": "gemini-1.5-pro",
-        "secret_key_params": GEMINI_SECRET_KEY_PARAMS,
-        "temperature":1.0,
-    },
-)
-
-GEMINI_EXP_1206_T1_CONFIG = ModelConfig(
-    GeminiModel,
-    {
-        "model_name": "gemini-exp-1206",
-        "secret_key_params": GEMINI_SECRET_KEY_PARAMS,
-        "temperature":1.0,
-    },
-)
-
-
-GEMINI_EXP_1121_T1_CONFIG = ModelConfig(
-    GeminiModel,
-    {
-        "model_name": "gemini-exp-1121",
-        "secret_key_params": GEMINI_SECRET_KEY_PARAMS,
-        "temperature":1.0,
-    },
-)
-
-
-
 GEMINI_V1_PRO_CONFIG = ModelConfig(
     GeminiModel,
     {
@@ -258,20 +115,11 @@
 )
 
 # Claude models
-'''
 CLAUDE_SECRET_KEY_PARAMS = {
     "key_name": "your_claude_secret_key_name",
     "local_keys_path": "keys/keys.json",
     "key_vault_url": None,
 }
-'''
-
-CLAUDE_SECRET_KEY_PARAMS = {
-    "key_name": "aif-eval-claude",
-    "local_keys_path": "keys/aifeval-vault-azure-net.json",
-    "key_vault_url": "https://aifeval.vault.azure.net",
-}
-
 
 CLAUDE_3_OPUS_CONFIG = ModelConfig(
     ClaudeModel,
@@ -289,32 +137,6 @@
     },
 )
 
-CLAUDE_3_5_SONNET_T1_CONFIG = ModelConfig(
-    ClaudeModel,
-    {
-        "secret_key_params": CLAUDE_SECRET_KEY_PARAMS,
-        "model_name": "claude-3-5-sonnet-20240620",
-        "temperature":1.0,
-    },
-)
-
-CLAUDE_3_5_SONNET_SEARCH_T1_CONFIG = ModelConfig(
-    ClaudeModel,
-    {
-        "secret_key_params": CLAUDE_SECRET_KEY_PARAMS,
-        "model_name": "claude-3-5-sonnet-20241022",
-        "temperature": 1.0,
-    },
-)
-
-CLAUDE_3_5_SONNET_SEARCH_CONFIG = ModelConfig(
-    ClaudeModel,
-    {
-        "secret_key_params": CLAUDE_SECRET_KEY_PARAMS,
-        "model_name": "claude-3-5-sonnet-20241022",
-    },
-)
-
 # LLAVA models
 LLAVAHF_V16_34B_CONFIG = ModelConfig(
     LLaVAHuggingFaceModel,
@@ -377,51 +199,3 @@
         "model_name": "Mistral-large-2407",
     },
 )
-
-
-
-AIF_NT_MISTRAL_LARGE_2_2407_T1_CONFIG = ModelConfig(
-    MistralServerlessAzureRestEndpointModel,
-    {
-        "url": "https://Mistral-large-2407-aifeval.eastus.models.ai.azure.com/v1/chat/completions",
-        "secret_key_params": {
-            "key_name": "aif-nt-mistral-large-2-2407",
-            "local_keys_path": "keys/aifeval-vault-azure-net.json",
-            "key_vault_url": "https://aifeval.vault.azure.net",
-        },
-        "model_name": "Mistral-large-2407-aifeval",
-        "temperature": 1.0,
-
-    },
-)
-
-
-GCR_LLAMA3_1_70B_INSTRUCT_CONFIG = ModelConfig(
-    RestEndpointModel,
-    {
-        "url": "https://gcr-llama31-70b-instruct.westus3.inference.ml.azure.com/score",
-        "secret_key_params": {
-            "key_name": "meta-llama-3-1-70b-instruct-1",
-            "local_keys_path": "keys/aifeval-vault-azure-net.json",
-            "key_vault_url": "https://aifeval.vault.azure.net",
-        },
-        "model_name": "meta-llama-3-1-70b-instruct-1",
-        "temperature": 1.0,
-
-    },
-)
-
-AIF_NT_LLAMA3_1_405B_INSTRUCT_CONFIG = ModelConfig(
-    LlamaServerlessAzureRestEndpointModel,
-    {
-        "url": "https://Meta-Llama-3-1-405B-Instruct-aif.eastus.models.ai.azure.com/v1/chat/completions",
-        "secret_key_params": {
-            "key_name": "aif-nt-meta-llama-3-1-405b-instruct-1",
-            "local_keys_path": "keys/aifeval-vault-azure-net.json",
-            "key_vault_url": "https://aifeval.vault.azure.net",
-        },
-        "model_name": "Meta-Llama-3-1-405B-Instruct-aif",
-        "temperature": 1.0,
-
-    },
-)
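
The comment kept at the top of the file notes that each secret_key_params dictionary can either point at a local keys file or name a secret in Azure Key Vault, and that only one of the two sources needs to be supplied. As a minimal sketch of both variants, using the field names from this file; the key names, file path, and vault URL below are placeholders, not values from this repository:

# Option 1: read the API key from a local JSON keys file; no Key Vault involved.
EXAMPLE_LOCAL_SECRET_KEY_PARAMS = {
    "key_name": "your_model_secret_key_name",  # placeholder key name
    "local_keys_path": "keys/keys.json",       # local file holding the key
    "key_vault_url": None,
}

# Option 2: fetch the API key from an Azure Key Vault secret instead of a local file.
EXAMPLE_KEY_VAULT_SECRET_KEY_PARAMS = {
    "key_name": "your_model_secret_key_name",                     # placeholder secret name in the vault
    "local_keys_path": None,
    "key_vault_url": "https://your-vault-name.vault.azure.net",   # placeholder vault URL
}

# Either dictionary can then back a model config in the same way the configs above do, e.g.:
EXAMPLE_CLAUDE_CONFIG = ModelConfig(
    ClaudeModel,
    {
        "secret_key_params": EXAMPLE_KEY_VAULT_SECRET_KEY_PARAMS,
        "model_name": "your_claude_model_name",
    },
)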