-
Notifications
You must be signed in to change notification settings - Fork 8
Expand file tree
/
Copy pathexample-fake-stream-usage.js
More file actions
executable file
· 159 lines (129 loc) · 4.42 KB
/
example-fake-stream-usage.js
File metadata and controls
executable file
· 159 lines (129 loc) · 4.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
#!/usr/bin/env node
/**
* Example usage of the fake stream method
* This shows different ways to use the fake streaming for debugging
*/
import aiService from './services/aiService.js';
/**
 * Example 1: the simplest possible call — no conversation history,
 * a fixed model, and chunk-by-chunk printing of the fake stream.
 */
async function example1_BasicUsage() {
  console.log('π Example 1: Basic Fake Stream Usage\n');

  const fakeStream = await aiService.streamGeminiFake(
    "What is the capital of France?",
    [],
    "gemini-1.5-flash",
    "example-session-1"
  );

  console.log('Streaming response:');
  for await (const piece of fakeStream) {
    process.stdout.write(piece.text);
  }
  console.log('\n');
}
/**
 * Example 2: passes prior user/assistant turns so the fake stream
 * can be exercised with conversational context.
 */
async function example2_WithConversationHistory() {
  console.log('π Example 2: With Conversation History\n');

  // Two prior turns: one from the user, one from the assistant.
  const priorTurns = [
    { role: 'user', content: 'Hello, I need help with JavaScript.' },
    { role: 'assistant', content: 'I\'d be happy to help you with JavaScript! What specific topic would you like to know about?' }
  ];

  const fakeStream = await aiService.streamGeminiFake(
    "Can you explain closures?",
    priorTurns,
    "gemini-1.5-flash",
    "example-session-2"
  );

  console.log('Streaming response with context:');
  for await (const piece of fakeStream) {
    process.stdout.write(piece.text);
  }
  console.log('\n');
}
/**
 * Example 3: enables the MCP tool-call simulation flag and branches
 * on whether each emitted chunk carries a tool call or plain text.
 */
async function example3_WithMCPTools() {
  console.log('π Example 3: With MCP Tools Enabled\n');

  const fakeStream = await aiService.streamGeminiFake(
    "Can you help me with a task?",
    [],
    "gemini-1.5-flash",
    "example-session-3",
    [],
    true // Enable MCP tools simulation
  );

  console.log('Streaming response with potential tool calls:');
  for await (const piece of fakeStream) {
    if (!piece.toolCall) {
      process.stdout.write(piece.text);
      continue;
    }
    // Chunk represents a simulated tool invocation rather than text.
    console.log(`\nπ§ Tool Call: ${piece.toolCall.name}`);
    console.log(` Parameters:`, piece.toolCall.parameters);
  }
  console.log('\n');
}
/**
 * Example 4: wraps both stream creation and consumption in a
 * try/catch so failures surface as a logged message, not a crash.
 */
async function example4_ErrorHandling() {
  console.log('π Example 4: Error Handling\n');
  try {
    const fakeStream = await aiService.streamGeminiFake(
      "This is a test message",
      [],
      "gemini-1.5-flash",
      "example-session-4"
    );

    console.log('Streaming response:');
    for await (const piece of fakeStream) {
      process.stdout.write(piece.text);
    }
    console.log('\n');
  } catch (error) {
    // Surface the failure but keep the example runner alive.
    console.error('Error occurred:', error.message);
  }
}
/**
 * Example 5: collects per-chunk metadata (index, text, length,
 * timestamp) while streaming, then prints summary statistics.
 *
 * Fix: the original computed `Math.round(totalLength / chunkCount)`
 * unconditionally, which prints "NaN" when the stream yields zero
 * chunks; the average is now guarded against division by zero.
 */
async function example5_ChunkAnalysis() {
  console.log('π Example 5: Detailed Chunk Analysis\n');

  const stream = await aiService.streamGeminiFake(
    "Analyze this streaming response in detail.",
    [],
    "gemini-1.5-flash",
    "example-session-5"
  );

  let chunkCount = 0;
  let totalLength = 0;
  const chunks = [];

  console.log('Detailed chunk analysis:');
  console.log('β'.repeat(60));
  for await (const chunk of stream) {
    chunkCount++;
    totalLength += chunk.text.length;
    chunks.push({
      index: chunk.chunkIndex,
      text: chunk.text.trim(),
      length: chunk.text.length,
      timestamp: chunk.timestamp
    });
    console.log(`Chunk ${chunkCount}: "${chunk.text.trim()}" (${chunk.text.length} chars)`);
  }
  console.log('β'.repeat(60));

  // Guard against an empty stream so the average never prints NaN.
  const averageSize = chunkCount > 0 ? Math.round(totalLength / chunkCount) : 0;
  console.log(`Total chunks: ${chunkCount}`);
  console.log(`Total length: ${totalLength} characters`);
  console.log(`Average chunk size: ${averageSize} characters`);
  console.log(`First chunk: "${chunks[0]?.text}"`);
  console.log(`Last chunk: "${chunks[chunks.length - 1]?.text}"`);
  console.log();
}
/**
 * Runs every example in sequence, printing a divider between each.
 *
 * Fix: the original closing message was a single-quoted string literal
 * split across a physical newline (a mojibake-mangled emoji), which is
 * a syntax error in JavaScript; restored as a valid one-line literal.
 */
async function runAllExamples() {
  console.log('π Running Fake Stream Examples\n');
  console.log('='.repeat(60));
  await example1_BasicUsage();
  console.log('='.repeat(60));
  await example2_WithConversationHistory();
  console.log('='.repeat(60));
  await example3_WithMCPTools();
  console.log('='.repeat(60));
  await example4_ErrorHandling();
  console.log('='.repeat(60));
  await example5_ChunkAnalysis();
  console.log('='.repeat(60));
  console.log('✅ All examples completed!');
}
// Entry point: execute every example and exit non-zero on any failure.
runAllExamples().catch((err) => {
  console.error('π₯ Examples failed:', err);
  process.exit(1);
});