Skip to content

Commit 2ce0662

Browse files
committed
fix: reasoning output is now rendered in the UI
1 parent 5ba65b8 commit 2ce0662

6 files changed

Lines changed: 277 additions & 30 deletions

File tree

src/components/Chat.tsx

Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import type { Message } from 'ai'
88
import { type Servers, type ToolItem } from '../lib/schemas'
99
import { ToolCallMessage } from './ToolCallMessage'
1010
import { Toolbox } from './Toolbox'
11+
import { ReasoningMessage } from './ReasoningMessage'
1112
import { useModel } from '../contexts/ModelContext'
1213
import { useUser } from '../contexts/UserContext'
1314
import { Button } from './ui/button'
@@ -34,6 +35,15 @@ type StreamEvent =
3435
| 'arguments_done'
3536
}
3637
| { type: 'user'; id: string; content: string }
38+
| {
39+
type: 'reasoning'
40+
effort: string
41+
summary: string | null
42+
model?: string
43+
serviceTier?: string
44+
temperature?: number
45+
topP?: number
46+
}
3747

3848
// Helper function to map tool types to status
3949
const getToolStatus = (
@@ -112,6 +122,84 @@ export function Chat() {
112122
try {
113123
const toolState = JSON.parse(line.slice(2))
114124

125+
// Handle reasoning summary streaming
126+
if (toolState.type === 'reasoning_summary_delta') {
127+
setStreamBuffer((prev) => {
128+
// Find the last reasoning message
129+
const last = prev[prev.length - 1]
130+
if (last && last.type === 'reasoning' && !last.done) {
131+
// Append delta to summary
132+
return [
133+
...prev.slice(0, -1),
134+
{
135+
...last,
136+
summary: (last.summary || '') + toolState.delta,
137+
effort: toolState.effort || last.effort,
138+
model: toolState.model || last.model,
139+
serviceTier: toolState.serviceTier || last.serviceTier,
140+
temperature: toolState.temperature ?? last.temperature,
141+
topP: toolState.topP ?? last.topP,
142+
},
143+
]
144+
} else {
145+
// Start a new reasoning message
146+
return [
147+
...prev,
148+
{
149+
type: 'reasoning',
150+
summary: toolState.delta,
151+
effort: toolState.effort || '',
152+
model: toolState.model,
153+
serviceTier: toolState.serviceTier,
154+
temperature: toolState.temperature,
155+
topP: toolState.topP,
156+
done: false,
157+
},
158+
]
159+
}
160+
})
161+
return
162+
}
163+
164+
if (toolState.type === 'reasoning_summary_done') {
165+
setStreamBuffer((prev) => {
166+
// Mark the last reasoning message as done
167+
const last = prev[prev.length - 1]
168+
if (last && last.type === 'reasoning' && !last.done) {
169+
return [
170+
...prev.slice(0, -1),
171+
{
172+
...last,
173+
done: true,
174+
effort: toolState.effort || last.effort,
175+
model: toolState.model || last.model,
176+
serviceTier: toolState.serviceTier || last.serviceTier,
177+
temperature: toolState.temperature ?? last.temperature,
178+
topP: toolState.topP ?? last.topP,
179+
},
180+
]
181+
}
182+
return prev
183+
})
184+
return
185+
}
186+
187+
if (toolState.type === 'reasoning') {
188+
setStreamBuffer((prev) => [
189+
...prev,
190+
{
191+
type: 'reasoning',
192+
effort: toolState.effort,
193+
summary: toolState.summary,
194+
model: toolState.model,
195+
serviceTier: toolState.serviceTier,
196+
temperature: toolState.temperature,
197+
topP: toolState.topP,
198+
},
199+
])
200+
return
201+
}
202+
115203
if ('delta' in toolState) {
116204
try {
117205
toolState.delta =
@@ -323,6 +411,19 @@ export function Chat() {
323411
/>
324412
)
325413
}
414+
} else if ('type' in event && event.type === 'reasoning') {
415+
return (
416+
<ReasoningMessage
417+
key={`reasoning-${idx}-${event.effort}-${event.summary || ''}`}
418+
effort={event.effort}
419+
summary={event.summary}
420+
model={event.model}
421+
serviceTier={event.serviceTier}
422+
temperature={event.temperature}
423+
topP={event.topP}
424+
isLoading={streaming && idx === renderEvents.length - 1}
425+
/>
426+
)
326427
} else if ('type' in event && event.type === 'assistant') {
327428
const assistantEvent = event as Extract<
328429
StreamEvent,

src/components/ChatMessage.tsx

Lines changed: 2 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import { cn } from '../lib/utils'
22
import type { Message } from '../mcp/client'
33
import { formatTimestamp } from '../lib/utils'
44
import { Bot, User, CheckCircle2, Clock, AlertCircle } from 'lucide-react'
5-
import ReactMarkdown from 'react-markdown'
5+
import { MarkdownContent } from './MarkdownContent'
66

77
type ChatMessageProps = {
88
message: Message
@@ -49,35 +49,7 @@ export function ChatMessage({ message, isLoading }: ChatMessageProps) {
4949
)}
5050
>
5151
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
52-
<ReactMarkdown
53-
components={{
54-
pre: ({ node, ...props }) => (
55-
<pre
56-
className="overflow-x-auto whitespace-pre-wrap break-words break-all"
57-
{...props}
58-
/>
59-
),
60-
code: ({ node, ...props }) => (
61-
<code
62-
className="break-words break-all whitespace-pre-wrap"
63-
{...props}
64-
/>
65-
),
66-
a: ({ href, children, ...props }) => (
67-
<a
68-
href={href}
69-
className="break-words break-all"
70-
target="_blank"
71-
rel="noopener noreferrer"
72-
{...props}
73-
>
74-
{children}
75-
</a>
76-
),
77-
}}
78-
>
79-
{message.content}
80-
</ReactMarkdown>
52+
<MarkdownContent content={message.content} />
8153
</div>
8254
</div>
8355

src/components/MarkdownContent.tsx

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import ReactMarkdown from 'react-markdown'
2+
3+
type MarkdownContentProps = {
4+
content: string
5+
}
6+
7+
export function MarkdownContent({ content }: MarkdownContentProps) {
8+
return (
9+
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
10+
<ReactMarkdown
11+
components={{
12+
pre: ({ node, ...props }) => (
13+
<pre
14+
className="overflow-x-auto whitespace-pre-wrap break-words break-all"
15+
{...props}
16+
/>
17+
),
18+
code: ({ node, ...props }) => (
19+
<code
20+
className="break-words break-all whitespace-pre-wrap"
21+
{...props}
22+
/>
23+
),
24+
a: ({ href, children, ...props }) => (
25+
<a
26+
href={href}
27+
className="break-words break-all"
28+
target="_blank"
29+
rel="noopener noreferrer"
30+
{...props}
31+
>
32+
{children}
33+
</a>
34+
),
35+
}}
36+
>
37+
{content}
38+
</ReactMarkdown>
39+
</div>
40+
)
41+
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
import { Brain } from 'lucide-react'
2+
import { cn } from '../lib/utils'
3+
import { MarkdownContent } from './MarkdownContent'
4+
5+
type ReasoningMessageProps = {
6+
effort: string
7+
summary: string | null
8+
model?: string
9+
serviceTier?: string
10+
temperature?: number
11+
topP?: number
12+
isLoading?: boolean
13+
}
14+
15+
export function ReasoningMessage({
16+
effort,
17+
summary,
18+
model,
19+
serviceTier,
20+
temperature,
21+
topP,
22+
isLoading,
23+
}: ReasoningMessageProps) {
24+
return (
25+
<div className="flex w-full max-w-full gap-2 py-2 animate-in fade-in justify-start">
26+
<div
27+
className={cn(
28+
'flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md bg-purple-100 text-purple-600 dark:bg-purple-900 dark:text-purple-300',
29+
isLoading && 'animate-[pulse_1.5s_ease-in-out_infinite] opacity-80',
30+
)}
31+
>
32+
<Brain className="h-5 w-5" />
33+
</div>
34+
35+
<div className="flex flex-col space-y-1 items-start w-full sm:w-[85%] md:w-[75%] lg:w-[65%]">
36+
<div className="rounded-2xl px-4 py-2 text-sm w-full bg-purple-50 text-purple-900 dark:bg-purple-950 dark:text-purple-100">
37+
<div className="font-medium mb-1">Reasoning</div>
38+
<div className="text-xs space-y-1">
39+
{effort && <div>Effort: {effort}</div>}
40+
{summary && (
41+
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
42+
<MarkdownContent content={summary} />
43+
</div>
44+
)}
45+
{model && <div>Model: {model}</div>}
46+
{serviceTier && <div>Service Tier: {serviceTier}</div>}
47+
<div className="flex gap-4">
48+
{temperature !== undefined && (
49+
<div>Temperature: {temperature}</div>
50+
)}
51+
{topP !== undefined && <div>Top P: {topP}</div>}
52+
</div>
53+
</div>
54+
</div>
55+
</div>
56+
</div>
57+
)
58+
}

src/lib/streaming.ts

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ export function streamText(
2121
}
2222

2323
let buffer = ''
24+
let reasoningSummaryBuffer = ''
2425

2526
const flush = () => {
2627
if (buffer) {
@@ -75,6 +76,48 @@ export function streamText(
7576
}
7677
}
7778
break
79+
80+
case 'response.content_part.added':
81+
case 'response.content_part.done':
82+
if (chunk.part?.type === 'output_text' && chunk.part.text) {
83+
buffer += chunk.part.text
84+
flush()
85+
}
86+
break
87+
88+
case 'response.reasoning.delta':
89+
if (typeof chunk.delta === 'string') {
90+
controller.enqueue(
91+
encoder.encode(
92+
`t:${JSON.stringify({
93+
type: 'reasoning',
94+
effort: chunk.effort,
95+
summary: chunk.delta,
96+
model: chunk.model,
97+
serviceTier: chunk.service_tier,
98+
temperature: chunk.temperature,
99+
topP: chunk.top_p,
100+
})}\n`,
101+
),
102+
)
103+
}
104+
break
105+
106+
case 'response.created':
107+
case 'response.in_progress':
108+
if (chunk.response?.reasoning) {
109+
controller.enqueue(
110+
encoder.encode(
111+
`t:${JSON.stringify({
112+
type: 'reasoning',
113+
effort: chunk.response.reasoning.effort,
114+
summary: chunk.response.reasoning.summary,
115+
})}\n`,
116+
),
117+
)
118+
}
119+
break
120+
78121
case 'response.mcp_call.failed':
79122
console.error('[TOOL CALL FAILED]', chunk)
80123

@@ -163,6 +206,31 @@ export function streamText(
163206
}
164207
break
165208

209+
case 'response.reasoning_summary_text.delta':
210+
if (typeof chunk.delta === 'string') {
211+
reasoningSummaryBuffer += chunk.delta
212+
}
213+
break
214+
215+
case 'response.reasoning_summary_text.done':
216+
if (reasoningSummaryBuffer) {
217+
controller.enqueue(
218+
encoder.encode(
219+
`t:${JSON.stringify({
220+
type: 'reasoning',
221+
effort: chunk.effort,
222+
summary: reasoningSummaryBuffer,
223+
model: chunk.model,
224+
serviceTier: chunk.service_tier,
225+
temperature: chunk.temperature,
226+
topP: chunk.top_p,
227+
})}\n`,
228+
),
229+
)
230+
reasoningSummaryBuffer = ''
231+
}
232+
break
233+
166234
default:
167235
break
168236
}

src/routes/api/chat.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,13 @@ export const ServerRoute = createServerFileRoute('/api/chat').methods({
8888
input,
8989
stream: true,
9090
user: userId,
91+
...(model.startsWith('o3') || model.startsWith('o4')
92+
? {
93+
reasoning: {
94+
summary: 'detailed',
95+
},
96+
}
97+
: {}),
9198
})
9299

93100
return streamText(answer)

0 commit comments

Comments
 (0)