Skip to content

Commit b84b5b5

Browse files
committed
refactor(deepseek): remove unused getReasoningContent() method
- Remove currentReasoningContent field and accumulation logic
- Remove getReasoningContent() method (was never called outside tests)
- Simplify reasoning_content handling to just yield without accumulating
- Remove associated tests for removed functionality
1 parent 76b8bfb commit b84b5b5

File tree

2 files changed

+1
-67
lines changed

2 files changed

+1
-67
lines changed

src/api/providers/__tests__/deepseek.spec.ts

Lines changed: 0 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -424,25 +424,6 @@ describe("DeepSeekHandler", () => {
424424
expect(reasoningChunks[1].text).toBe(" I'll analyze step by step.")
425425
})
426426

427-
it("should accumulate reasoning content via getReasoningContent()", async () => {
428-
const reasonerHandler = new DeepSeekHandler({
429-
...mockOptions,
430-
apiModelId: "deepseek-reasoner",
431-
})
432-
433-
// Before any API call, reasoning content should be undefined
434-
expect(reasonerHandler.getReasoningContent()).toBeUndefined()
435-
436-
const stream = reasonerHandler.createMessage(systemPrompt, messages)
437-
for await (const _chunk of stream) {
438-
// Consume the stream
439-
}
440-
441-
// After streaming, reasoning content should be accumulated
442-
const reasoningContent = reasonerHandler.getReasoningContent()
443-
expect(reasoningContent).toBe("Let me think about this... I'll analyze step by step.")
444-
})
445-
446427
it("should pass thinking parameter for deepseek-reasoner model", async () => {
447428
const reasonerHandler = new DeepSeekHandler({
448429
...mockOptions,
@@ -511,36 +492,6 @@ describe("DeepSeekHandler", () => {
511492
const toolCallChunks = chunks.filter((chunk) => chunk.type === "tool_call_partial")
512493
expect(toolCallChunks.length).toBeGreaterThan(0)
513494
expect(toolCallChunks[0].name).toBe("get_weather")
514-
515-
// Reasoning content should be accumulated for potential continuation
516-
const reasoningContent = reasonerHandler.getReasoningContent()
517-
expect(reasoningContent).toBeDefined()
518-
})
519-
520-
it("should reset reasoning content for each new request", async () => {
521-
const reasonerHandler = new DeepSeekHandler({
522-
...mockOptions,
523-
apiModelId: "deepseek-reasoner",
524-
})
525-
526-
// First request
527-
const stream1 = reasonerHandler.createMessage(systemPrompt, messages)
528-
for await (const _chunk of stream1) {
529-
// Consume the stream
530-
}
531-
532-
const reasoningContent1 = reasonerHandler.getReasoningContent()
533-
expect(reasoningContent1).toBeDefined()
534-
535-
// Second request should reset the reasoning content
536-
const stream2 = reasonerHandler.createMessage(systemPrompt, messages)
537-
for await (const _chunk of stream2) {
538-
// Consume the stream
539-
}
540-
541-
// The reasoning content should be fresh from the second request
542-
const reasoningContent2 = reasonerHandler.getReasoningContent()
543-
expect(reasoningContent2).toBe("Let me think about this... I'll analyze step by step.")
544495
})
545496
})
546497
})

src/api/providers/deepseek.ts

Lines changed: 1 addition & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,6 @@ type DeepSeekChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParamsStream
2424
}
2525

2626
export class DeepSeekHandler extends OpenAiHandler {
27-
private currentReasoningContent: string = ""
28-
2927
constructor(options: ApiHandlerOptions) {
3028
super({
3129
...options,
@@ -37,15 +35,6 @@ export class DeepSeekHandler extends OpenAiHandler {
3735
})
3836
}
3937

40-
/**
41-
* Returns the accumulated reasoning content from the last API call.
42-
* This is used for interleaved thinking with tool calls - the reasoning_content
43-
* needs to be passed back to the API in subsequent requests within the same turn.
44-
*/
45-
getReasoningContent(): string | undefined {
46-
return this.currentReasoningContent || undefined
47-
}
48-
4938
override getModel() {
5039
const id = this.options.apiModelId ?? deepSeekDefaultModelId
5140
const info = deepSeekModels[id as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId]
@@ -64,9 +53,6 @@ export class DeepSeekHandler extends OpenAiHandler {
6453
// Check if this is a thinking-enabled model (deepseek-reasoner)
6554
const isThinkingModel = modelId.includes("deepseek-reasoner")
6655

67-
// Reset reasoning content accumulator for this request
68-
this.currentReasoningContent = ""
69-
7056
// Convert messages to R1 format (merges consecutive same-role messages)
7157
// This is required for DeepSeek which does not support successive messages with the same role
7258
const convertedMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
@@ -128,12 +114,9 @@ export class DeepSeekHandler extends OpenAiHandler {
128114
// Handle reasoning_content from DeepSeek's interleaved thinking
129115
// This is the proper way DeepSeek sends thinking content in streaming
130116
if ("reasoning_content" in delta && delta.reasoning_content) {
131-
const reasoningText = (delta.reasoning_content as string) || ""
132-
// Accumulate reasoning content for potential tool call continuation
133-
this.currentReasoningContent += reasoningText
134117
yield {
135118
type: "reasoning",
136-
text: reasoningText,
119+
text: (delta.reasoning_content as string) || "",
137120
}
138121
}
139122

0 commit comments

Comments (0)