Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
99 changes: 98 additions & 1 deletion lib/chat_models/chat_open_ai_responses.ex
Original file line number Diff line number Diff line change
Expand Up @@ -614,6 +614,10 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
|> Utils.conditionally_add_to_map("file_id", file_id)
end

# Reasoning (:thinking) content parts are output-only, so they are omitted
# from the request payload built for the API.
def content_part_for_api(%ChatOpenAIResponses{} = _model, %ContentPart{type: :thinking}) do
  nil
end

# Ignore unknown, unsupported content parts
def content_part_for_api(%ChatOpenAIResponses{} = _model, %ContentPart{type: :unsupported}),
do: nil
Expand Down Expand Up @@ -937,6 +941,52 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
# "sequence_number" => 4,
# "type" => "response.output_text.delta"
# }
# Streamed chunk of reasoning text ("response.reasoning.delta"). The text is
# wrapped in a :thinking ContentPart so it accumulates separately from the
# regular assistant output text at the same output index.
def do_process_response(_model, %{
      "type" => "response.reasoning.delta",
      "output_index" => output_index,
      "delta" => delta_text
    }) do
  attrs = %{
    role: :assistant,
    index: output_index,
    status: :incomplete,
    content: ContentPart.new!(%{type: :thinking, content: delta_text})
  }

  case MessageDelta.new(attrs) do
    {:ok, delta} ->
      delta

    {:error, %Ecto.Changeset{} = changeset} ->
      {:error, LangChainError.exception(changeset)}
  end
end

# Streamed chunk of regular output text that carries its output_index.
# Status stays :incomplete here; it is marked :complete once the overall
# response finishes.
def do_process_response(_model, %{
      "type" => "response.output_text.delta",
      "output_index" => output_index,
      "delta" => delta_text
    }) do
  attrs = %{
    role: :assistant,
    index: output_index,
    status: :incomplete,
    content: delta_text
  }

  case MessageDelta.new(attrs) do
    {:ok, delta} ->
      delta

    {:error, %Ecto.Changeset{} = changeset} ->
      {:error, LangChainError.exception(changeset)}
  end
end

def do_process_response(_model, %{"type" => "response.output_text.delta", "delta" => delta_text}) do
data = %{
content: delta_text,
Expand Down Expand Up @@ -977,6 +1027,33 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
end
end

# First event for a reasoning/thinking block: "response.output_item.added"
# with an item of type "reasoning". It is followed by a series of
# `response.reasoning.delta` events and closed by a
# `response.output_item.done` event. Emits an empty :thinking part so the
# delta stream has a starting point at this output index.
def do_process_response(_model, %{
      "type" => "response.output_item.added",
      "output_index" => output_index,
      "item" => %{"type" => "reasoning", "id" => _reasoning_id}
    }) do
  attrs = %{
    role: :assistant,
    index: output_index,
    status: :incomplete,
    content: ContentPart.new!(%{type: :thinking, content: ""})
  }

  case MessageDelta.new(attrs) do
    {:ok, delta} ->
      delta

    {:error, %Ecto.Changeset{} = changeset} ->
      {:error, LangChainError.exception(changeset)}
  end
end

# This is the first event we get for a function call.
# It is followed by a series of `response.function_call_arguments.delta` events.
# It is followed by a `response.function_call_arguments.done` event. (which we skip)
Expand Down Expand Up @@ -1040,6 +1117,27 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
end
end

# Closing event for a reasoning block: "response.output_item.done" with a
# "reasoning" item. Emits an empty :thinking part with :complete status so
# the accumulated thinking content at this index is marked finished.
def do_process_response(_model, %{
      "type" => "response.output_item.done",
      "output_index" => output_index,
      "item" => %{"type" => "reasoning"}
    }) do
  attrs = %{
    role: :assistant,
    index: output_index,
    status: :complete,
    content: ContentPart.new!(%{type: :thinking, content: ""})
  }

  case MessageDelta.new(attrs) do
    {:ok, delta} ->
      delta

    {:error, %Ecto.Changeset{} = changeset} ->
      {:error, LangChainError.exception(changeset)}
  end
end

def do_process_response(_model, %{
"type" => "response.output_item.done",
"output_index" => output_index,
Expand Down Expand Up @@ -1128,7 +1226,6 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
"response.mcp_call.in_progress",
"response.output_text.annotation.added",
"response.queued",
"response.reasoning.delta",
"response.reasoning_summary.delta",
"response.reasoning_summary.done",
"error"
Expand Down
101 changes: 100 additions & 1 deletion test/chat_models/chat_open_ai_responses_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -472,6 +472,30 @@ defmodule LangChain.ChatModels.ChatOpenAIResponsesTest do
assert part["type"] == "input_file"
assert part["file_url"] == "https://example.com/document.pdf"
end

test "omits thinking content parts when converting to API format" do
  model = ChatOpenAIResponses.new!(%{"model" => @test_model})

  # Simulate an assistant message carrying both reasoning and answer text,
  # as would come back from a previous assistant turn.
  thinking =
    LangChain.Message.ContentPart.new!(%{type: :thinking, content: "Some reasoning"})

  answer = LangChain.Message.ContentPart.text!("Here's my answer")
  msg = LangChain.Message.new_assistant!([thinking, answer])

  api = ChatOpenAIResponses.for_api(model, msg)

  # Exactly one "message" entry comes back (no tool calls involved).
  assert [message_api] = api
  assert message_api["type"] == "message"

  # Only the text part survives; the :thinking part is stripped for the API.
  assert [content_part] = message_api["content"]
  assert content_part["type"] == "input_text"
  assert content_part["text"] == "Here's my answer"
end
end

describe "for_api/1 tool calls and results" do
Expand Down Expand Up @@ -810,6 +834,82 @@ defmodule LangChain.ChatModels.ChatOpenAIResponsesTest do
assert call3.arguments == %{"expression" => "1+1"}
end

test "parses reasoning output_item.added event", %{model: model} do
  # Opening event of a streamed reasoning block.
  added_event = %{
    "type" => "response.output_item.added",
    "sequence_number" => 2,
    "output_index" => 0,
    "item" => %{
      "id" => "rs_077ecb7bd77f1554016940159a98d081909d82480668e57471",
      "type" => "reasoning",
      "summary" => []
    }
  }

  # An empty, incomplete :thinking delta should be produced at index 0.
  assert %LangChain.MessageDelta{
           role: :assistant,
           status: :incomplete,
           index: 0,
           content: %LangChain.Message.ContentPart{type: :thinking, content: ""}
         } = ChatOpenAIResponses.do_process_response(model, added_event)
end

test "parses response.reasoning.delta event", %{model: model} do
  reasoning_delta = %{
    "type" => "response.reasoning.delta",
    "output_index" => 0,
    "delta" => "Let me think about this problem..."
  }

  # The delta text should be wrapped in an incomplete :thinking part.
  assert %LangChain.MessageDelta{
           role: :assistant,
           status: :incomplete,
           index: 0,
           content: %LangChain.Message.ContentPart{
             type: :thinking,
             content: "Let me think about this problem..."
           }
         } = ChatOpenAIResponses.do_process_response(model, reasoning_delta)
end

test "parses reasoning output_item.done event", %{model: model} do
  # Closing event of a streamed reasoning block.
  done_event = %{
    "type" => "response.output_item.done",
    "sequence_number" => 3,
    "output_index" => 0,
    "item" => %{
      "id" => "rs_063b9657f7c2e68601694016d7008881909a128744538cebec",
      "type" => "reasoning",
      "summary" => []
    }
  }

  # An empty :thinking delta with :complete status marks the block finished.
  assert %LangChain.MessageDelta{
           role: :assistant,
           status: :complete,
           index: 0,
           content: %LangChain.Message.ContentPart{type: :thinking, content: ""}
         } = ChatOpenAIResponses.do_process_response(model, done_event)
end

test "parses response.output_text.delta with output_index", %{model: model} do
  text_delta = %{
    "type" => "response.output_text.delta",
    "output_index" => 1,
    "delta" => "Hello"
  }

  result = ChatOpenAIResponses.do_process_response(model, text_delta)

  # Plain text deltas keep their output_index and stay :incomplete.
  assert %LangChain.MessageDelta{
           role: :assistant,
           status: :incomplete,
           index: 1,
           content: "Hello"
         } = result
end

test "parses response.completed with token usage", %{model: model} do
completed = %{
"type" => "response.completed",
Expand All @@ -827,7 +927,6 @@ defmodule LangChain.ChatModels.ChatOpenAIResponsesTest do
%{"type" => "response.content_part.added"},
%{"type" => "response.content_part.done"},
%{"type" => "response.function_call_arguments.done"},
%{"type" => "response.reasoning.delta"},
%{"type" => "response.queued"}
]

Expand Down