@@ -34,7 +34,7 @@ class ModelConfig(BaseModel):
 
     @validator("model_type", pre=True, always=True)
     def validate_model_type(cls, v):
-        if v not in ["gpt", "codellama", "mixtral"]:
+        if v not in ["gpt", "codellama", "mistral"]:
             raise ValueError(f"Unsupported model type: {v}")
         return v
 
@@ -55,7 +55,7 @@ def setup(self):
             self.setup_gpt()
         elif self.model_type == "codellama":
             self.setup_codellama()
-        elif self.model_type == "mixtral":
+        elif self.model_type == "mistral":
             self.setup_mixtral()
 
     def setup_gpt(self):
@@ -71,13 +71,13 @@ def setup_gpt(self):
 
     def setup_mixtral(self):
         self.llm = ChatOpenAI(
-            model_name="mistralai/Mixtral-8x7B-Instruct-v0.1",
+            model_name="mistralai/mistral-medium",
             temperature=0.2,
-            api_key=self.secrets["MIXTRAL_API_KEY"],
+            api_key=self.secrets["OPENROUTER_API_KEY"],
             max_tokens=500,
             callbacks=[self.callback_handler],
             streaming=True,
-            base_url="https://api.together.xyz/v1",
+            base_url="https://openrouter.ai/api/v1",
         )
 
     def setup_codellama(self):
@@ -157,8 +157,8 @@ def load_chain(model_name="GPT-3.5", callback_handler=None):
         model_type = "codellama"
     elif "GPT-3.5" in model_name:
         model_type = "gpt"
-    elif "mixtral" in model_name.lower():
-        model_type = "mixtral"
+    elif "mistral" in model_name.lower():
+        model_type = "mistral"
     else:
         raise ValueError(f"Unsupported model name: {model_name}")
 
0 commit comments