Skip to content

Commit dea7f6b

Browse files
authored
fix: Hotfix, buffer incomplete JSON objects before parsing (#4037)
* fix: hotfix, buffer incomplete JSON objects before parsing * test: adjust mocked response to exactly equal real-world response * fix: small adjustment to backend error handling * fix: display backend's error title * add test cases for buffered streaming in edge cases * fix: context label being covered by code panel * fix: code clean-up, bug fix and increased test coverage
1 parent c393ce6 commit dea7f6b

File tree

13 files changed

+536
-99
lines changed

13 files changed

+536
-99
lines changed

backend/companion/companionRouter.js

Lines changed: 25 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -148,35 +148,43 @@ async function handleChatMessage(req, res) {
148148
});
149149

150150
if (!response.ok) {
151-
const respJson = await response?.json();
152-
const error = new Error(respJson?.message ?? '');
153-
error.status = response?.status ?? 500;
154-
error.error =
155-
response?.statusText ??
156-
'Failed to fetch AI chat data. Request ID: ' + escape(req.id);
151+
const error = new Error();
152+
error.status = response.status;
153+
154+
if (response.status >= 500) {
155+
error.message = 'A temporary interruption occurred. Please try again.';
156+
error.error = 'Service is interrupted';
157+
} else {
158+
const respJson = await response?.json();
159+
error.message =
160+
respJson?.message ??
161+
'A temporary interruption occurred. Please try again.';
162+
error.error = respJson?.error ?? response?.statusText;
163+
}
164+
157165
throw error;
158166
}
159167

160168
const stream = Readable.fromWeb(response.body);
161169
await pipeline(stream, res);
162170
} catch (error) {
171+
req.log.warn(error);
163172
if (!res.headersSent) {
164-
req.log.warn(error);
165173
res.status(error.status).json({
166174
error: error.error,
167175
message: error.message,
168176
});
169177
} else {
170-
setTimeout(() => {
171-
req.log.warn(error);
172-
res.write(
173-
JSON.stringify({
174-
error:
175-
'Failed to fetch AI chat data. Request ID: ' + escape(req.id),
176-
}),
177-
);
178-
res.end();
179-
}, 500);
178+
res.write(
179+
JSON.stringify({
180+
streamingError: {
181+
message:
182+
'An error occured during streaming. Request ID: ' +
183+
escape(req.id),
184+
},
185+
}) + '\n',
186+
);
187+
res.end();
180188
}
181189
}
182190
}

src/components/KymaCompanion/api/error.ts

Lines changed: 0 additions & 14 deletions
This file was deleted.
Lines changed: 238 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,238 @@
1+
import { vi, describe, it, expect } from 'vitest';
2+
import { extractJsonObjectsFromChunk, readChunk } from './getChatResponse';
3+
import { ErrorType } from '../components/Chat/types';
4+
5+
describe('readChunk', () => {
6+
const decoder = new TextDecoder();
7+
const sessionID = 'test-session';
8+
9+
it('handles an entire JSON object in a single chunk', async () => {
10+
const handleChatResponse = vi.fn();
11+
const handleError = vi.fn();
12+
13+
const mockReader = {
14+
read: vi
15+
.fn()
16+
.mockResolvedValueOnce({
17+
done: false,
18+
value: new TextEncoder().encode('{"a": {"b": "c"}}\n'),
19+
})
20+
.mockResolvedValueOnce({ done: true }),
21+
};
22+
23+
await readChunk(
24+
mockReader,
25+
decoder,
26+
handleChatResponse,
27+
handleError,
28+
sessionID,
29+
);
30+
31+
expect(handleChatResponse).toHaveBeenCalledTimes(1);
32+
expect(handleChatResponse).toHaveBeenCalledWith({ a: { b: 'c' } });
33+
expect(handleError).not.toHaveBeenCalled();
34+
});
35+
36+
it('handles two entire JSON objects in a single chunk', async () => {
37+
const handleChatResponse = vi.fn();
38+
const handleError = vi.fn();
39+
40+
const mockReader = {
41+
read: vi
42+
.fn()
43+
.mockResolvedValueOnce({
44+
done: false,
45+
value: new TextEncoder().encode(
46+
'{"a": {"b": "c"}}\n{"d": {"e": "f"}}\n',
47+
),
48+
})
49+
.mockResolvedValueOnce({ done: true }),
50+
};
51+
52+
await readChunk(
53+
mockReader,
54+
decoder,
55+
handleChatResponse,
56+
handleError,
57+
sessionID,
58+
);
59+
60+
expect(handleChatResponse).toHaveBeenCalledTimes(2);
61+
expect(handleChatResponse).toHaveBeenCalledWith({ a: { b: 'c' } });
62+
expect(handleChatResponse).toHaveBeenCalledWith({ d: { e: 'f' } });
63+
expect(handleError).not.toHaveBeenCalled();
64+
});
65+
66+
it('handles a partial JSON object split across two chunks', async () => {
67+
const handleChatResponse = vi.fn();
68+
const handleError = vi.fn();
69+
70+
const mockReader = {
71+
read: vi
72+
.fn()
73+
.mockResolvedValueOnce({
74+
done: false,
75+
value: new TextEncoder().encode('{"a": {"b": "c"'),
76+
})
77+
.mockResolvedValueOnce({
78+
done: false,
79+
value: new TextEncoder().encode('}}\n'),
80+
})
81+
.mockResolvedValueOnce({ done: true }),
82+
};
83+
84+
await readChunk(
85+
mockReader,
86+
decoder,
87+
handleChatResponse,
88+
handleError,
89+
sessionID,
90+
);
91+
92+
expect(handleChatResponse).toHaveBeenCalledTimes(1);
93+
expect(handleChatResponse).toHaveBeenCalledWith({ a: { b: 'c' } });
94+
expect(handleError).not.toHaveBeenCalled();
95+
});
96+
97+
it('handles one entire and one partial JSON object in one chunk with completion in the next', async () => {
98+
const handleChatResponse = vi.fn();
99+
const handleError = vi.fn();
100+
101+
const mockReader = {
102+
read: vi
103+
.fn()
104+
.mockResolvedValueOnce({
105+
done: false,
106+
value: new TextEncoder().encode('{"a": {"b": "c"}}\n{"d": {"e": "f"'),
107+
})
108+
.mockResolvedValueOnce({
109+
done: false,
110+
value: new TextEncoder().encode('}}\n'),
111+
})
112+
.mockResolvedValueOnce({ done: true }),
113+
};
114+
115+
await readChunk(
116+
mockReader,
117+
decoder,
118+
handleChatResponse,
119+
handleError,
120+
sessionID,
121+
);
122+
123+
expect(handleChatResponse).toHaveBeenCalledTimes(2);
124+
expect(handleChatResponse).toHaveBeenCalledWith({ a: { b: 'c' } });
125+
expect(handleChatResponse).toHaveBeenCalledWith({ d: { e: 'f' } });
126+
expect(handleError).not.toHaveBeenCalled();
127+
});
128+
129+
it('handles stream ending with incomplete JSON data', async () => {
130+
const handleChatResponse = vi.fn();
131+
const handleError = vi.fn();
132+
133+
const mockReader = {
134+
read: vi
135+
.fn()
136+
.mockResolvedValueOnce({
137+
done: false,
138+
value: new TextEncoder().encode('{"a": {"b": "c"'),
139+
})
140+
.mockResolvedValueOnce({ done: true }),
141+
};
142+
143+
await readChunk(
144+
mockReader,
145+
decoder,
146+
handleChatResponse,
147+
handleError,
148+
sessionID,
149+
);
150+
151+
expect(handleChatResponse).not.toHaveBeenCalled();
152+
expect(handleError).toHaveBeenCalledWith({
153+
message: 'Stream ended with incomplete JSON data',
154+
type: ErrorType.FATAL,
155+
});
156+
});
157+
158+
it('handles JSON chunk with error', async () => {
159+
const handleChatResponse = vi.fn();
160+
const handleError = vi.fn();
161+
162+
const mockReader = {
163+
read: vi
164+
.fn()
165+
.mockResolvedValueOnce({
166+
done: false,
167+
value: new TextEncoder().encode(
168+
'{"streamingError": {"message": "test message" }}\n',
169+
),
170+
})
171+
.mockResolvedValueOnce({ done: true }),
172+
};
173+
174+
await readChunk(
175+
mockReader,
176+
decoder,
177+
handleChatResponse,
178+
handleError,
179+
sessionID,
180+
);
181+
182+
expect(handleChatResponse).not.toHaveBeenCalled();
183+
expect(handleError).toHaveBeenCalledWith({
184+
message: 'test message',
185+
type: ErrorType.FATAL,
186+
});
187+
});
188+
});
189+
190+
describe('extractJsonObjectsFromChunk', () => {
191+
it('extracts entire JSON object', () => {
192+
const input = '{"a": {"b": "c"}}\n';
193+
const result = extractJsonObjectsFromChunk(input);
194+
expect(result.completeObjects).toEqual(['{"a": {"b": "c"}}']);
195+
expect(result.remainingBuffer).toBe('');
196+
});
197+
198+
it('extracts two entire JSON objects', () => {
199+
const input = '{"a": {"b": "c"}}\n{"d": {"e": "f"}}\n';
200+
const result = extractJsonObjectsFromChunk(input);
201+
expect(result.completeObjects).toEqual([
202+
'{"a": {"b": "c"}}',
203+
'{"d": {"e": "f"}}',
204+
]);
205+
expect(result.remainingBuffer).toBe('');
206+
});
207+
208+
it('handles partial JSON object', () => {
209+
const input = '{"a": {"b": "';
210+
const result = extractJsonObjectsFromChunk(input);
211+
expect(result.completeObjects).toEqual([]);
212+
expect(result.remainingBuffer).toBe('{"a": {"b": "');
213+
});
214+
215+
it('extracts one entire and one partial JSON object', () => {
216+
const input = '{"a": {"b": "c"}}\n{"d": {"e": "';
217+
const result = extractJsonObjectsFromChunk(input);
218+
expect(result.completeObjects).toEqual(['{"a": {"b": "c"}}']);
219+
expect(result.remainingBuffer).toBe('{"d": {"e": "');
220+
});
221+
222+
it('extracts multiple JSON objects with a partial object', () => {
223+
const input = '{"a": {"b": "c"}}\n{"d": {"e": "f"}}\n{"g": {"h": "';
224+
const result = extractJsonObjectsFromChunk(input);
225+
expect(result.completeObjects).toEqual([
226+
'{"a": {"b": "c"}}',
227+
'{"d": {"e": "f"}}',
228+
]);
229+
expect(result.remainingBuffer).toBe('{"g": {"h": "');
230+
});
231+
232+
it('handles JSON object with internal newlines', () => {
233+
const input = '{"a": {\n "b": "c"\n}}\n';
234+
const result = extractJsonObjectsFromChunk(input);
235+
expect(result.completeObjects).toEqual(['{"a": {\n "b": "c"\n}}']);
236+
expect(result.remainingBuffer).toBe('');
237+
});
238+
});

0 commit comments

Comments
 (0)