-
Notifications
You must be signed in to change notification settings - Fork 281
Expand file tree
/
Copy pathapp.js
More file actions
206 lines (180 loc) · 5.65 KB
/
app.js
File metadata and controls
206 lines (180 loc) · 5.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
// <complete_code>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <imports>
import { OpenAI } from "openai";
import { FoundryLocalManager } from "foundry-local-sdk";
// </imports>
// By using an alias, the most suitable model will be downloaded
// to your end-user's device.
// TIP: You can find a list of available models by running the
// following command in your terminal: `foundry model list`.
// The catalog resolves this alias to a concrete model variant in
// runToolCallingExample() below.
const alias = "qwen2.5-0.5b";
// <tool_definitions>
/**
 * Local implementation backing the `multiply_numbers` tool definition.
 *
 * @param {number} first - The first factor.
 * @param {number} second - The second factor.
 * @returns {number} The product of the two factors.
 */
function multiplyNumbers(first, second) {
  const product = first * second;
  return product;
}
// </tool_definitions>
/**
 * Demonstrates a streaming tool-calling round trip against a model hosted by
 * Foundry Local: the model is asked a math question, it requests the
 * `multiply_numbers` tool, the tool result is fed back into the conversation,
 * and the model is prompted to produce the final answer.
 *
 * Side effects: downloads/registers execution providers, downloads and loads
 * the model, starts (and always stops) the Foundry Local web service, and
 * writes progress and model output to stdout.
 *
 * @returns {Promise<void>}
 * @throws {Error} if the model alias cannot be resolved in the catalog or the
 *   web service does not expose an endpoint.
 */
async function runToolCallingExample() {
  let manager = null;
  let model = null;
  try {
    // <init>
    console.log("Initializing Foundry Local SDK...");
    // NOTE(review): create() is used synchronously here — confirm it is not
    // async in the SDK version you target.
    manager = FoundryLocalManager.create({
      appName: "foundry_local_samples",
      serviceEndpoint: "http://localhost:5000",
      logLevel: "info"
    });
    // </init>
    // Download and register all execution providers, rendering one in-place
    // progress line per EP (carriage return rewrites the current line).
    let currentEp = '';
    await manager.downloadAndRegisterEps((epName, percent) => {
      if (epName !== currentEp) {
        // A new EP started downloading: finish the previous progress line.
        if (currentEp !== '') process.stdout.write('\n');
        currentEp = epName;
      }
      process.stdout.write(`\r ${epName.padEnd(30)} ${percent.toFixed(1).padStart(5)}%`);
    });
    if (currentEp !== '') process.stdout.write('\n');
    // <model_setup>
    const catalog = manager.catalog;
    model = await catalog.getModel(alias);
    if (!model) {
      throw new Error(`Model ${alias} not found`);
    }
    console.log(`Loading model ${model.id}...`);
    await model.download();
    await model.load();
    console.log('✓ Model loaded');
    manager.startWebService();
    const endpoint = manager.urls[0];
    if (!endpoint) {
      throw new Error("Foundry Local web service did not return an endpoint.");
    }
    // Point the OpenAI client at the local service's OpenAI-compatible /v1
    // API; the key is required by the client but ignored by Foundry Local.
    const openai = new OpenAI({
      baseURL: `${endpoint.replace(/\/$/, "")}/v1`,
      apiKey: "local"
    });
    // </model_setup>
    // <tool_loop>
    // Prepare messages
    const messages = [
      {
        role: "system",
        content: "You are a helpful AI assistant. If necessary, you can use any provided tools to answer the question."
      },
      { role: "user", content: "What is the answer to 7 multiplied by 6?" }
    ];
    // Prepare tools
    const tools = [
      {
        type: "function",
        function: {
          name: "multiply_numbers",
          description: "A tool for multiplying two numbers.",
          parameters: {
            type: "object",
            properties: {
              first: {
                type: "integer",
                description: "The first number in the operation"
              },
              second: {
                type: "integer",
                description: "The second number in the operation"
              }
            },
            required: ["first", "second"]
          }
        }
      }
    ];
    // Start the conversation
    console.log("Chat completion response:");
    const firstStream = await openai.chat.completions.create({
      model: model.id,
      messages,
      tools,
      tool_choice: "required",
      stream: true
    });
    // FIX: streamed tool calls arrive as delta fragments — the id/name appear
    // once and the argument JSON is split across chunks, keyed by `index`.
    // Accumulate the fragments instead of keeping only the finish_reason
    // chunk, which need not carry the complete tool_calls payload.
    const pendingToolCalls = [];
    for await (const chunk of firstStream) {
      const choice = chunk.choices?.[0];
      const content = choice?.delta?.content;
      if (content) {
        process.stdout.write(content);
      }
      for (const delta of choice?.delta?.tool_calls ?? []) {
        const i = delta.index ?? 0;
        pendingToolCalls[i] ??= { id: "", type: "function", function: { name: "", arguments: "" } };
        if (delta.id) pendingToolCalls[i].id = delta.id;
        if (delta.function?.name) pendingToolCalls[i].function.name += delta.function.name;
        if (delta.function?.arguments) pendingToolCalls[i].function.arguments += delta.function.arguments;
      }
    }
    console.log();
    // FIX: the chat format requires the assistant message that issued the
    // tool calls to precede the role:"tool" result messages; without it the
    // follow-up completion request is malformed.
    if (pendingToolCalls.length > 0) {
      messages.push({ role: "assistant", content: null, tool_calls: pendingToolCalls });
    }
    // Invoke tools called and append responses to the chat
    for (const toolCall of pendingToolCalls) {
      if (toolCall.function?.name === "multiply_numbers") {
        const args = JSON.parse(toolCall.function.arguments || "{}");
        const first = args.first;
        const second = args.second;
        console.log(`\nInvoking tool: ${toolCall.function.name} with arguments ${first} and ${second}`);
        const result = multiplyNumbers(first, second);
        console.log(`Tool response: ${result}`);
        messages.push({
          role: "tool",
          tool_call_id: toolCall.id,
          content: result.toString()
        });
      }
    }
    console.log("\nTool calls completed. Prompting model to continue conversation...\n");
    // Prompt the model to continue the conversation after the tool call
    messages.push({
      role: "system",
      content: "Respond only with the answer generated by the tool."
    });
    // Run the next turn of the conversation
    console.log("Chat completion response:");
    const secondStream = await openai.chat.completions.create({
      model: model.id,
      messages,
      tools,
      tool_choice: "auto",
      stream: true
    });
    for await (const chunk of secondStream) {
      const content = chunk.choices?.[0]?.delta?.content;
      if (content) {
        process.stdout.write(content);
      }
    }
    console.log();
    // </tool_loop>
  } finally {
    // <cleanup> — always release the model and stop the service, even when
    // the body threw; each step is best-effort so one failure does not mask
    // the original error or skip the next step.
    if (model) {
      try {
        if (await model.isLoaded()) {
          await model.unload();
        }
      } catch (cleanupError) {
        console.warn("Cleanup warning while unloading model:", cleanupError);
      }
    }
    if (manager) {
      try {
        manager.stopWebService();
      } catch (cleanupError) {
        console.warn("Cleanup warning while stopping service:", cleanupError);
      }
    }
    // </cleanup>
  }
}
// Entry point: surface any failure without an unhandled rejection, and set a
// non-zero exit code so shells can detect that the sample failed.
try {
  await runToolCallingExample();
} catch (error) {
  console.error("Error running sample:", error);
  process.exitCode = 1;
}
// </complete_code>