Skip to content

Commit 03045e2

Browse files
committed
fix: temperature and max_tokens (interface + OpenAI)
1 parent 2a288e6 commit 03045e2

File tree

2 files changed

+8
-2
lines changed

2 files changed

+8
-2
lines changed

src/backend/src/modules/puterai/AIInterfaceService.js

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,8 @@ class AIInterfaceService extends BaseService {
80 80
stream: { type: 'flag' },
81 81
response: { type: 'json' },
82 82
model: { type: 'string' },
83+
temperature: { type: 'number' },
84+
max_tokens: { type: 'number' },
83 85
},
84 86
result: { type: 'json' },
85 87
}

src/backend/src/modules/puterai/OpenAICompletionService.js

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,10 @@ class OpenAICompletionService extends BaseService {
216 216
* @returns {Promise<Object>} The completion response containing message and usage info
217 217
* @throws {Error} If messages are invalid or content is flagged by moderation
218 218
*/
219-
async complete (messages, { stream, moderation, model, tools }) {
219+
async complete (messages, {
220+
stream, moderation, model, tools,
221+
temperature, max_tokens,
222+
}) {
220 223
// Validate messages
221 224
if ( ! Array.isArray(messages) ) {
222 225
throw new Error('`messages` must be an array');
@@ -254,7 +257,8 @@ class OpenAICompletionService extends BaseService {
254 257
messages: messages,
255 258
model: model,
256 259
...(tools ? { tools } : {}),
257-
// max_tokens,
260+
...(max_tokens ? { max_tokens } : {}),
261+
...(temperature ? { temperature } : {}),
258 262
stream,
259 263
...(stream ? {
260 264
stream_options: { include_usage: true },

0 commit comments

Comments (0)