
Commit fb16d5a

Make sure thought content is included in handoff context (#6319)
Resolves #6295. Ensures the thought content is included in the handoff message context when the only tool call was the handoff tool call.
1 parent 165c189 commit fb16d5a
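
As a rough end-to-end illustration of the fix, the sketch below is adapted from the updated test further down; it is not an official example, and the import paths assume the 0.4-series autogen-agentchat, autogen-core, and autogen-ext packages. A replayed model turn whose only tool call is the handoff, plus a thought, should now yield a HandoffMessage whose context carries that thought instead of being empty.

import asyncio
import json

from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.base import Handoff
from autogen_agentchat.messages import HandoffMessage
from autogen_core import FunctionCall
from autogen_core.models import CreateResult, ModelFamily, RequestUsage
from autogen_ext.models.replay import ReplayChatCompletionClient


async def main() -> None:
    handoff = Handoff(target="agent2")
    # Replay a single model turn whose only tool call is the handoff, plus a thought.
    model_client = ReplayChatCompletionClient(
        [
            CreateResult(
                finish_reason="function_calls",
                content=[FunctionCall(id="1", arguments=json.dumps({}), name=handoff.name)],
                usage=RequestUsage(prompt_tokens=42, completion_tokens=43),
                cached=False,
                thought="Calling handoff function",
            )
        ],
        model_info={
            "function_calling": True,
            "vision": True,
            "json_output": True,
            "family": ModelFamily.GPT_4O,
            "structured_output": True,
        },
    )
    agent = AssistantAgent("tool_use_agent", model_client=model_client, handoffs=[handoff])
    result = await agent.run(task="task")
    handoff_message = result.messages[-1]
    assert isinstance(handoff_message, HandoffMessage)
    # Before this commit the context was empty; now it holds the thought as an AssistantMessage.
    print(handoff_message.context)


asyncio.run(main())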

File tree

2 files changed (+95, -10 lines)

Diff for: python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py

+8 lines changed

@@ -1111,6 +1111,14 @@ def _check_and_handle_handoff(
                     )
                 )
                 handoff_context.append(FunctionExecutionResultMessage(content=tool_call_results))
+            elif model_result.thought:
+                # If no tool calls, but a thought exists, include it in the context
+                handoff_context.append(
+                    AssistantMessage(
+                        content=model_result.thought,
+                        source=agent_name,
+                    )
+                )
 
             # Return response for the first handoff
             return Response(
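
The new elif branch only fires when the handoff call was the model's sole tool call, so there are no tool calls or results to forward. A minimal sketch of the two shapes the handoff context can now take, using the message types from the diff above (the literal values are illustrative and taken from the tests below; the import locations are assumed to be the usual autogen_core ones):

from autogen_core import FunctionCall
from autogen_core.models import (
    AssistantMessage,
    FunctionExecutionResult,
    FunctionExecutionResultMessage,
)

# Case 1: other tool calls ran alongside the handoff -> the calls and their
# results are forwarded, with the thought attached to the AssistantMessage.
context_with_tool_calls = [
    AssistantMessage(
        content=[FunctionCall(id="2", arguments='{"input": "task"}', name="_pass_function")],
        source="tool_use_agent",
        thought="Calling handoff function",
    ),
    FunctionExecutionResultMessage(
        content=[FunctionExecutionResult(call_id="2", content="pass", is_error=False, name="_pass_function")]
    ),
]

# Case 2 (this commit): the handoff was the only call -> the thought alone is
# carried over as a plain AssistantMessage.
context_thought_only = [
    AssistantMessage(content="Calling handoff function", source="tool_use_agent"),
]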

Diff for: python/packages/autogen-agentchat/tests/test_assistant_agent.py

+87, -10 lines changed

@@ -554,6 +554,7 @@ async def test_handoffs() -> None:
                 ],
                 usage=RequestUsage(prompt_tokens=42, completion_tokens=43),
                 cached=False,
+                thought="Calling handoff function",
             )
         ],
         model_info={
@@ -576,19 +577,95 @@ async def test_handoffs() -> None:
     )
     assert HandoffMessage in tool_use_agent.produced_message_types
     result = await tool_use_agent.run(task="task")
-    assert len(result.messages) == 4
+    assert len(result.messages) == 5
     assert isinstance(result.messages[0], TextMessage)
     assert result.messages[0].models_usage is None
-    assert isinstance(result.messages[1], ToolCallRequestEvent)
-    assert result.messages[1].models_usage is not None
-    assert result.messages[1].models_usage.completion_tokens == 43
-    assert result.messages[1].models_usage.prompt_tokens == 42
-    assert isinstance(result.messages[2], ToolCallExecutionEvent)
-    assert result.messages[2].models_usage is None
-    assert isinstance(result.messages[3], HandoffMessage)
-    assert result.messages[3].content == handoff.message
-    assert result.messages[3].target == handoff.target
+    assert isinstance(result.messages[1], ThoughtEvent)
+    assert result.messages[1].content == "Calling handoff function"
+    assert isinstance(result.messages[2], ToolCallRequestEvent)
+    assert result.messages[2].models_usage is not None
+    assert result.messages[2].models_usage.completion_tokens == 43
+    assert result.messages[2].models_usage.prompt_tokens == 42
+    assert isinstance(result.messages[3], ToolCallExecutionEvent)
+    assert result.messages[3].models_usage is None
+    assert isinstance(result.messages[4], HandoffMessage)
+    assert result.messages[4].content == handoff.message
+    assert result.messages[4].target == handoff.target
+    assert result.messages[4].models_usage is None
+    assert result.messages[4].context == [AssistantMessage(content="Calling handoff function", source="tool_use_agent")]
+
+    # Test streaming.
+    model_client.reset()
+    index = 0
+    async for message in tool_use_agent.run_stream(task="task"):
+        if isinstance(message, TaskResult):
+            assert message == result
+        else:
+            assert message == result.messages[index]
+            index += 1
+
+
+@pytest.mark.asyncio
+async def test_handoff_with_tool_call_context() -> None:
+    handoff = Handoff(target="agent2")
+    model_client = ReplayChatCompletionClient(
+        [
+            CreateResult(
+                finish_reason="function_calls",
+                content=[
+                    FunctionCall(id="1", arguments=json.dumps({}), name=handoff.name),
+                    FunctionCall(id="2", arguments=json.dumps({"input": "task"}), name="_pass_function"),
+                ],
+                usage=RequestUsage(prompt_tokens=42, completion_tokens=43),
+                cached=False,
+                thought="Calling handoff function",
+            )
+        ],
+        model_info={
+            "function_calling": True,
+            "vision": True,
+            "json_output": True,
+            "family": ModelFamily.GPT_4O,
+            "structured_output": True,
+        },
+    )
+    tool_use_agent = AssistantAgent(
+        "tool_use_agent",
+        model_client=model_client,
+        tools=[
+            _pass_function,
+            _fail_function,
+            FunctionTool(_echo_function, description="Echo"),
+        ],
+        handoffs=[handoff],
+    )
+    assert HandoffMessage in tool_use_agent.produced_message_types
+    result = await tool_use_agent.run(task="task")
+    assert len(result.messages) == 5
+    assert isinstance(result.messages[0], TextMessage)
+    assert result.messages[0].models_usage is None
+    assert isinstance(result.messages[1], ThoughtEvent)
+    assert result.messages[1].content == "Calling handoff function"
+    assert isinstance(result.messages[2], ToolCallRequestEvent)
+    assert result.messages[2].models_usage is not None
+    assert result.messages[2].models_usage.completion_tokens == 43
+    assert result.messages[2].models_usage.prompt_tokens == 42
+    assert isinstance(result.messages[3], ToolCallExecutionEvent)
     assert result.messages[3].models_usage is None
+    assert isinstance(result.messages[4], HandoffMessage)
+    assert result.messages[4].content == handoff.message
+    assert result.messages[4].target == handoff.target
+    assert result.messages[4].models_usage is None
+    assert result.messages[4].context == [
+        AssistantMessage(
+            content=[FunctionCall(id="2", arguments=r'{"input": "task"}', name="_pass_function")],
+            source="tool_use_agent",
+            thought="Calling handoff function",
+        ),
+        FunctionExecutionResultMessage(
+            content=[FunctionExecutionResult(call_id="2", content="pass", is_error=False, name="_pass_function")]
+        ),
+    ]
 
     # Test streaming.
     model_client.reset()
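
These two cases can be run locally with a targeted pytest invocation, for example: pytest python/packages/autogen-agentchat/tests/test_assistant_agent.py -k "test_handoffs or test_handoff_with_tool_call_context" (the -k expression is illustrative; adjust the path to your checkout).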
