|
1 | | -import '@dotenvx/dotenvx/config'; |
| 1 | +// import '@dotenvx/dotenvx/config'; |
2 | 2 |
|
3 | 3 | import { z } from 'zod'; |
4 | 4 |
|
5 | | -import { HumanMessage, SystemMessage } from '@langchain/core/messages'; |
6 | 5 | import { ChatGroq } from '@langchain/groq'; |
| 6 | +import { ChatOpenAI } from '@langchain/openai'; |
7 | 7 |
|
// Previously: hosted Groq model (kept for easy switching back).
// const model = new ChatGroq({
//   model: 'meta-llama/llama-4-scout-17b-16e-instruct',
//   temperature: 0,
// });

// Local OpenAI-compatible endpoint (port 1234 — presumably LM Studio; confirm).
// The local server ignores the API key, but the client requires a non-empty value.
const model = new ChatOpenAI({
  // model: 'qwen/qwen3-4b-2507',
  model: 'google/gemma-3-12b',
  configuration: {
    baseURL: 'http://localhost:1234/v1',
    apiKey: 'not-needed',
  },
  temperature: 0.5,
});
13 | 21 |
|
// Zod schema for the structured output we want the model to produce:
// a short profile of a mobile phone. Field descriptions are passed to the
// model as extra context for each property.
const phoneDevice = z.object({
  name: z.string().describe('Device name'),
  description: z.string().describe('Brief description'),
  details: z.string().describe('Device details or use cases'),
});
18 | 27 |
|
// 💡 Option 1: bind the Zod schema to the model. Passing a `name` gives the
// model additional context about what the schema represents.
const structuredLlm = model.withStructuredOutput(phoneDevice, {
  name: 'phoneDevice',
});
const res = await structuredLlm.invoke(
  'give a brief intro to a popular mobile phone',
);

// 💡 Option 2: an OpenAI-style JSON schema dict can be passed instead of Zod.
// 👀 NOTE: most local models do not support the JSON-schema form below, but
// hosted models do.
// const structuredLlm = model.withStructuredOutput({
//   name: 'phoneDevice',
//   description: 'cellphone device intro',
//   parameters: {
//     name: 'phoneDevice',
//     type: 'object',
//     properties: {
//       name: { type: 'string', description: 'Device name' },
//       description: { type: 'string', description: 'Brief description to device' },
//       details: { type: 'string', description: 'Device details or use cases' },
//     },
//     required: ['name', 'description'],
//   },
// });
// const res = await structuredLlm.invoke('give a brief intro to a popular mobile phone', {
//   // @ts-expect-error llm-topic
//   name: 'phoneDevice',
// });

console.log(res);
0 commit comments