@@ -13,7 +13,7 @@
     Sequence,
 )
 
-from autogen_core import CancellationToken, FunctionCall
+from autogen_core import CancellationToken, Component, ComponentModel, FunctionCall
 from autogen_core.memory import Memory
 from autogen_core.model_context import (
     ChatCompletionContext,
@@ -28,6 +28,8 @@
     UserMessage,
 )
 from autogen_core.tools import FunctionTool, Tool
+from pydantic import BaseModel
+from typing_extensions import Self
 
 from .. import EVENT_LOGGER_NAME
 from ..base import Handoff as HandoffBase
@@ -49,7 +51,21 @@
 event_logger = logging.getLogger(EVENT_LOGGER_NAME)
 
 
-class AssistantAgent(BaseChatAgent):
+class AssistantAgentConfig(BaseModel):
+    """The declarative configuration for the assistant agent."""
+
+    name: str
+    model_client: ComponentModel
+    # tools: List[Any] | None = None  # TBD
+    handoffs: List[HandoffBase | str] | None = None
+    model_context: ComponentModel | None = None
+    description: str
+    system_message: str | None = None
+    reflect_on_tool_use: bool
+    tool_call_summary_format: str
+
+
+class AssistantAgent(BaseChatAgent, Component[AssistantAgentConfig]):
     """An agent that provides assistance with tool use.
 
     The :meth:`on_messages` returns a :class:`~autogen_agentchat.base.Response`
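Since AssistantAgentConfig is a plain Pydantic model, the declarative form can be built and validated on its own. The following is a minimal sketch, not part of the diff: it assumes OpenAIChatCompletionClient from autogen_ext is available, that AssistantAgentConfig is exported from autogen_agentchat.agents, and uses illustrative field values; the model client is serialized via its own dump_component(), mirroring what the later hunks do.

# Sketch only; import paths and field values are assumptions, not defaults from this PR.
from autogen_agentchat.agents import AssistantAgentConfig  # assumed export location
from autogen_ext.models.openai import OpenAIChatCompletionClient  # assumed client

# Serialize the model client to a ComponentModel, as _to_config() does below.
model_client_config = OpenAIChatCompletionClient(model="gpt-4o").dump_component()

config = AssistantAgentConfig(
    name="assistant",
    model_client=model_client_config,
    handoffs=None,
    model_context=None,
    description="An agent that provides assistance with tool use.",
    system_message="You are a helpful assistant.",
    reflect_on_tool_use=False,
    tool_call_summary_format="{result}",
)
print(config.model_dump_json(indent=2))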
@@ -229,6 +245,9 @@ async def main() -> None:
     See `o1 beta limitations <https://platform.openai.com/docs/guides/reasoning#beta-limitations>`_ for more details.
     """
 
+    component_config_schema = AssistantAgentConfig
+    component_provider_override = "autogen_agentchat.agents.AssistantAgent"
+
     def __init__(
         self,
         name: str,
@@ -462,3 +481,40 @@ async def load_state(self, state: Mapping[str, Any]) -> None:
         assistant_agent_state = AssistantAgentState.model_validate(state)
         # Load the model context state.
         await self._model_context.load_state(assistant_agent_state.llm_context)
+
+    def _to_config(self) -> AssistantAgentConfig:
+        """Convert the assistant agent to a declarative config."""
+
+        # Raise an error if tools is not empty until it is implemented.
+        # TBD: Implement serializing tools and remove this check.
+        if self._tools and len(self._tools) > 0:
+            raise NotImplementedError("Serializing tools is not implemented yet.")
+
+        return AssistantAgentConfig(
+            name=self.name,
+            model_client=self._model_client.dump_component(),
+            # tools=[],  # TBD
+            handoffs=list(self._handoffs.values()),
+            model_context=self._model_context.dump_component(),
+            description=self.description,
+            system_message=self._system_messages[0].content
+            if self._system_messages and isinstance(self._system_messages[0].content, str)
+            else None,
+            reflect_on_tool_use=self._reflect_on_tool_use,
+            tool_call_summary_format=self._tool_call_summary_format,
+        )
+
+    @classmethod
+    def _from_config(cls, config: AssistantAgentConfig) -> Self:
+        """Create an assistant agent from a declarative config."""
+        return cls(
+            name=config.name,
+            model_client=ChatCompletionClient.load_component(config.model_client),
+            # tools=[],  # TBD
+            handoffs=config.handoffs,
+            model_context=None,
+            description=config.description,
+            system_message=config.system_message,
+            reflect_on_tool_use=config.reflect_on_tool_use,
+            tool_call_summary_format=config.tool_call_summary_format,
+        )
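Taken together with the Component base class, these hooks give the agent a declarative round trip. Below is a usage sketch, not part of the diff: it assumes dump_component() and load_component() are the public counterparts of _to_config() and _from_config() in the autogen_core Component API (as the calls inside the hunk suggest), and that OpenAIChatCompletionClient is available from autogen_ext. Note that _to_config() raises NotImplementedError if any tools are registered.

# Sketch only; client class and model name are assumptions for illustration.
from autogen_agentchat.agents import AssistantAgent
from autogen_ext.models.openai import OpenAIChatCompletionClient  # assumed client

agent = AssistantAgent(
    name="assistant",
    model_client=OpenAIChatCompletionClient(model="gpt-4o"),
    system_message="You are a helpful assistant.",
)

# Serialize to a declarative ComponentModel; this is expected to call _to_config().
component_model = agent.dump_component()
print(component_model.model_dump_json(indent=2))

# Rebuild an equivalent agent from the config; this is expected to call _from_config().
restored = AssistantAgent.load_component(component_model)
assert restored.name == agent.name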