@@ -116,7 +116,7 @@ class ClaudeService extends BaseService {
      * @param {string} [options.model] - The Claude model to use, defaults to service default
      * @returns {TypedValue|Object} Returns either a TypedValue with streaming response or a completion object
      */
-    async complete ({ messages, stream, model, tools }) {
+    async complete ({ messages, stream, model, tools, max_tokens, temperature }) {
         tools = FunctionCalling.make_claude_tools(tools);

         let system_prompts;
@@ -128,8 +128,8 @@ class ClaudeService extends BaseService {
         const init_chat_stream = async ({ chatStream }) => {
             const completion = await this.anthropic.messages.stream({
                 model: model ?? this.get_default_model(),
-                max_tokens: (model === 'claude-3-5-sonnet-20241022' || model === 'claude-3-5-sonnet-20240620') ? 8192 : 4096,
-                temperature: 0,
+                max_tokens: max_tokens || ((model === 'claude-3-5-sonnet-20241022' || model === 'claude-3-5-sonnet-20240620') ? 8192 : 4096),
+                temperature: temperature || 0,
                 system: PUTER_PROMPT + JSON.stringify(system_prompts),
                 messages,
                 ...(tools ? { tools } : {}),
@@ -202,8 +202,8 @@ class ClaudeService extends BaseService {

         const msg = await this.anthropic.messages.create({
             model: model ?? this.get_default_model(),
-            max_tokens: (model === 'claude-3-5-sonnet-20241022' || model === 'claude-3-5-sonnet-20240620') ? 8192 : 4096,
-            temperature: 0,
+            max_tokens: max_tokens || ((model === 'claude-3-5-sonnet-20241022' || model === 'claude-3-5-sonnet-20240620') ? 8192 : 4096),
+            temperature: temperature || 0,
             system: PUTER_PROMPT + JSON.stringify(system_prompts),
             messages,
             ...(tools ? { tools } : {}),
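
For reviewers, here is a minimal usage sketch of the updated `complete()` signature. Only the option names (`messages`, `stream`, `model`, `max_tokens`, `temperature`) come from the diff above; the `claude` instance name and the message payload are illustrative assumptions, not code from this change.

```js
// Sketch only: assumes a constructed ClaudeService instance named `claude`,
// and runs inside an async context.
const result = await claude.complete({
    messages: [
        { role: 'user', content: 'Summarize this in one sentence: ...' },
    ],
    stream: false,                       // request a plain completion object rather than a stream
    model: 'claude-3-5-sonnet-20241022', // optional; falls back to the service default
    max_tokens: 1024,                    // new: caps output length instead of the fixed 8192/4096
    temperature: 0.7,                    // new: overrides the previously hard-coded 0
});
```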