Skip to content

Commit 5d9f96a

Browse files
committed
now reasoning deltas stream in
1 parent a7220c1 commit 5d9f96a

2 files changed

Lines changed: 81 additions & 10 deletions

File tree

src/components/Chat.tsx

Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ type StreamEvent =
3131
serviceTier?: string
3232
temperature?: number
3333
topP?: number
34+
done?: boolean
3435
}
3536

3637
export function Chat() {
@@ -71,6 +72,68 @@ export function Chat() {
7172
try {
7273
const toolState = JSON.parse(line.slice(2))
7374

75+
// Handle reasoning summary streaming
76+
if (toolState.type === 'reasoning_summary_delta') {
77+
setStreamBuffer((prev) => {
78+
// Find the last reasoning message
79+
const last = prev[prev.length - 1]
80+
if (last && last.type === 'reasoning' && !last.done) {
81+
// Append delta to summary
82+
return [
83+
...prev.slice(0, -1),
84+
{
85+
...last,
86+
summary: (last.summary || '') + toolState.delta,
87+
effort: toolState.effort || last.effort,
88+
model: toolState.model || last.model,
89+
serviceTier: toolState.serviceTier || last.serviceTier,
90+
temperature: toolState.temperature ?? last.temperature,
91+
topP: toolState.topP ?? last.topP,
92+
},
93+
]
94+
} else {
95+
// Start a new reasoning message
96+
return [
97+
...prev,
98+
{
99+
type: 'reasoning',
100+
summary: toolState.delta,
101+
effort: toolState.effort || '',
102+
model: toolState.model,
103+
serviceTier: toolState.serviceTier,
104+
temperature: toolState.temperature,
105+
topP: toolState.topP,
106+
done: false,
107+
},
108+
]
109+
}
110+
})
111+
return
112+
}
113+
114+
if (toolState.type === 'reasoning_summary_done') {
115+
setStreamBuffer((prev) => {
116+
// Mark the last reasoning message as done
117+
const last = prev[prev.length - 1]
118+
if (last && last.type === 'reasoning' && !last.done) {
119+
return [
120+
...prev.slice(0, -1),
121+
{
122+
...last,
123+
done: true,
124+
effort: toolState.effort || last.effort,
125+
model: toolState.model || last.model,
126+
serviceTier: toolState.serviceTier || last.serviceTier,
127+
temperature: toolState.temperature ?? last.temperature,
128+
topP: toolState.topP ?? last.topP,
129+
},
130+
]
131+
}
132+
return prev
133+
})
134+
return
135+
}
136+
74137
if (toolState.type === 'reasoning') {
75138
setStreamBuffer((prev) => [
76139
...prev,
@@ -87,6 +150,7 @@ export function Chat() {
87150
return
88151
}
89152

153+
// Tool call fallback (for other tool types)
90154
if ('delta' in toolState) {
91155
try {
92156
toolState.delta =

src/lib/streaming.ts

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ export function streamText(
2121
}
2222

2323
let buffer = ''
24-
let reasoningSummaryBuffer = ''
2524

2625
const flush = () => {
2726
if (buffer) {
@@ -208,29 +207,37 @@ export function streamText(
208207

209208
case 'response.reasoning_summary_text.delta':
210209
if (typeof chunk.delta === 'string') {
211-
reasoningSummaryBuffer += chunk.delta
212-
}
213-
break
214-
215-
case 'response.reasoning_summary_text.done':
216-
if (reasoningSummaryBuffer) {
217210
controller.enqueue(
218211
encoder.encode(
219212
`t:${JSON.stringify({
220-
type: 'reasoning',
213+
type: 'reasoning_summary_delta',
214+
delta: chunk.delta,
221215
effort: chunk.effort,
222-
summary: reasoningSummaryBuffer,
223216
model: chunk.model,
224217
serviceTier: chunk.service_tier,
225218
temperature: chunk.temperature,
226219
topP: chunk.top_p,
227220
})}\n`,
228221
),
229222
)
230-
reasoningSummaryBuffer = ''
231223
}
232224
break
233225

226+
case 'response.reasoning_summary_text.done':
227+
controller.enqueue(
228+
encoder.encode(
229+
`t:${JSON.stringify({
230+
type: 'reasoning_summary_done',
231+
effort: chunk.effort,
232+
model: chunk.model,
233+
serviceTier: chunk.service_tier,
234+
temperature: chunk.temperature,
235+
topP: chunk.top_p,
236+
})}\n`,
237+
),
238+
)
239+
break
240+
234241
default:
235242
break
236243
}

0 commit comments

Comments (0)