Skip to content

Commit 8f12c5b

Browse files
committed
fix(agent): send final streaming message with isComplete: true
When the LLM stream finishes, the finish_reason typically arrives in a separate, content-less chunk. Because we only send assistant_streaming_message for chunks that contain content, the final streaming message was always emitted with isComplete: false, leaving the Web UI stuck in the streaming state. After the stream loop completes, if content was streamed and finishReason is set, send one final assistant_streaming_message with isComplete: true. Closes #833
1 parent 239b654 commit 8f12c5b

1 file changed

Lines changed: 14 additions & 0 deletions

File tree

multimodal/tarko/agent/src/agent/runner/llm-processor.ts

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -300,6 +300,7 @@ export class LLMProcessor {
300300
let hasReceivedFirstContent = false;
301301
let lastReasoningContentLength = 0;
302302
let reasoningCompleted = false;
303+
let hasStreamedContent = false;
303304

304305
this.logger.info(`llm stream start`);
305306

@@ -383,6 +384,7 @@ export class LLMProcessor {
383384

384385
// Only send content chunk if it contains actual content
385386
if (chunkResult.content) {
387+
hasStreamedContent = true;
386388
// Create content streaming event with only the incremental content
387389
const messageEvent = this.eventStream.createEvent('assistant_streaming_message', {
388390
content: chunkResult.content, // Only send the incremental content, not accumulated
@@ -417,6 +419,18 @@ export class LLMProcessor {
417419
return;
418420
}
419421

422+
// Send a final streaming message with isComplete: true if content was streamed
423+
// but the last content chunk didn't have finishReason set yet (common with OpenAI
424+
// where finish_reason arrives in a separate content-less chunk)
425+
if (streamingMode && hasStreamedContent && processingState.finishReason) {
426+
const finalStreamingEvent = this.eventStream.createEvent('assistant_streaming_message', {
427+
content: '',
428+
isComplete: true,
429+
messageId: messageId,
430+
});
431+
this.eventStream.sendEvent(finalStreamingEvent);
432+
}
433+
420434
// Finalize the stream processing
421435
const parsedResponse = toolCallEngine.finalizeStreamProcessing(processingState);
422436

0 commit comments

Comments
 (0)