Skip to content

Commit 045c363

Browse files
committed
added streaming output capability to the frontend.
1 parent 051f9f3 commit 045c363

File tree

6 files changed

+27
-5
lines changed

6 files changed

+27
-5
lines changed

CHANGELOG.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,17 @@ All notable changes to this project will be documented in this file.
77
### Added
88
- Added support for dynamic registration of `oxy`
99

10+
---
11+
12+
## [1.0.8] - 2025-11-14
13+
14+
### Added
15+
- Added streaming output capability to the frontend
16+
- Added Agent name field to think messages
17+
18+
### Changed
19+
- Changed the default value of the LLM parameter stream to True
20+
1021
---
1122
## [1.0.6.3] - 2025-10-15
1223

CHANGELOG_zh.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,17 @@
99

1010
---
1111

12+
## [1.0.8] - 2025-11-14
13+
14+
### Added
15+
- 新增前端的流式输出能力
16+
17+
### Changed
18+
- LLM参数 stream 默认值修改为 True
19+
- think消息 增加 Agent 名称字段
20+
21+
---
22+
1223
## [1.0.6.3] - 2025-10-15
1324

1425
### Added

demo.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
api_key=os.getenv("DEFAULT_LLM_API_KEY"),
1313
base_url=os.getenv("DEFAULT_LLM_BASE_URL"),
1414
model_name=os.getenv("DEFAULT_LLM_MODEL_NAME"),
15-
# llm_params={"stream": True}, # 开启流式输出
1615
),
1716
preset_tools.time_tools,
1817
oxy.ReActAgent(
@@ -36,7 +35,6 @@
3635
is_master=True,
3736
name="master_agent",
3837
sub_agents=["time_agent", "file_agent", "math_agent"],
39-
# team_size=2, # 开启团队协作,每个agent会调用2次llm
4038
),
4139
]
4240

oxygent/oxy/llms/base_llm.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -217,7 +217,9 @@ async def _post_send_message(self, oxy_response: OxyResponse):
217217
tool_call_dict = json.loads(extract_first_json(oxy_response.output))
218218
if "think" in tool_call_dict:
219219
msg = tool_call_dict["think"].strip()
220-
await oxy_request.send_message({"type": "think", "content": msg})
220+
await oxy_request.send_message(
221+
{"type": "think", "content": msg, "agent": oxy_request.caller}
222+
)
221223
except json.JSONDecodeError:
222224
pass
223225
except Exception as e:

oxygent/oxy/llms/http_llm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ async def _execute(self, oxy_request: OxyRequest) -> OxyResponse:
9292
payload = {
9393
"messages": await self._get_messages(oxy_request),
9494
"model": self.model_name,
95-
"stream": False,
95+
"stream": True,
9696
}
9797
payload.update(llm_config)
9898
for k, v in self.llm_params.items():

oxygent/oxy/llms/openai_llm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ async def _execute(self, oxy_request: OxyRequest) -> OxyResponse:
5353
payload = {
5454
"messages": await self._get_messages(oxy_request),
5555
"model": self.model_name,
56-
"stream": False,
56+
"stream": True,
5757
}
5858
payload.update(llm_config)
5959
for k, v in self.llm_params.items():

0 commit comments

Comments
 (0)