Skip to content

Commit 82dbd59

Browse files
committed
update
1 parent 77c50a5 commit 82dbd59

File tree

1 file changed

+78
-134
lines changed

1 file changed

+78
-134
lines changed

camel/models/openai_responses_adapter.py

Lines changed: 78 additions & 134 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,30 @@ def _extract_tool_call(item: Any) -> Dict[str, Any]:
6969
}
7070

7171

72+
def _build_chat_completion_chunk(
    *,
    chunk_id: str,
    model: str,
    delta: Dict[str, Any],
    finish_reason: Optional[str] = None,
    usage: Optional[Dict[str, int]] = None,
) -> ChatCompletionChunk:
    r"""Assemble a single OpenAI-style streaming chunk.

    Wraps *delta* in a one-choice ``chat.completion.chunk`` payload,
    stamping it with the current wall-clock time. Validation is skipped
    (``construct``) because the fields are produced locally and trusted.

    Args:
        chunk_id: Identifier carried in the chunk's ``id`` field.
        model: Model name echoed back in the chunk.
        delta: Incremental message payload for choice 0.
        finish_reason: Terminal reason (e.g. ``"stop"``), or ``None``
            while the stream is still in progress.
        usage: Optional token-usage accounting attached to the chunk.

    Returns:
        ChatCompletionChunk: The assembled streaming chunk.
    """
    # Streaming adapters emit exactly one choice per chunk (index 0).
    sole_choice = {
        "index": 0,
        "delta": delta,
        "finish_reason": finish_reason,
    }
    return ChatCompletionChunk.construct(
        id=chunk_id,
        choices=[sole_choice],
        created=int(time.time()),
        model=model,
        object="chat.completion.chunk",
        usage=usage,
    )
94+
95+
7296
def response_to_chat_completion(
7397
response: Any,
7498
model: str,
@@ -121,8 +145,8 @@ def iter_response_events_to_chat_chunks(
121145
model: str,
122146
on_response_completed: Optional[Callable[[str], None]] = None,
123147
) -> Generator[ChatCompletionChunk, None, None]:
124-
saw_tool_call = False
125-
saw_finish_reason = False
148+
has_tool_call = False
149+
has_finish_reason = False
126150
response_id = ""
127151
usage: Optional[Dict[str, int]] = None
128152
tool_idx_map: Dict[int, int] = {}
@@ -143,18 +167,10 @@ def iter_response_events_to_chat_chunks(
143167
delta = _get(event, "delta", "") or ""
144168
chunk_id = _get(event, "item_id", response_id)
145169
if delta:
146-
yield ChatCompletionChunk.construct(
147-
id=chunk_id,
148-
choices=[
149-
{
150-
"index": 0,
151-
"delta": {"content": delta},
152-
"finish_reason": None,
153-
}
154-
],
155-
created=int(time.time()),
170+
yield _build_chat_completion_chunk(
171+
chunk_id=chunk_id,
156172
model=model,
157-
object="chat.completion.chunk",
173+
delta={"content": delta},
158174
)
159175
continue
160176

@@ -165,7 +181,7 @@ def iter_response_events_to_chat_chunks(
165181
):
166182
item = _get(event, "item")
167183
if _get(item, "type") == "function_call":
168-
saw_tool_call = True
184+
has_tool_call = True
169185
out_idx = int(_get(event, "output_index", 0))
170186
mapped_idx = tool_idx_map.setdefault(
171187
out_idx, len(tool_idx_map)
@@ -187,18 +203,10 @@ def iter_response_events_to_chat_chunks(
187203
}
188204
tool_meta_emitted[out_idx] = True
189205
chunk_id = _get(item, "id", response_id)
190-
yield ChatCompletionChunk.construct(
191-
id=chunk_id,
192-
choices=[
193-
{
194-
"index": 0,
195-
"delta": {"tool_calls": [tc]},
196-
"finish_reason": None,
197-
}
198-
],
199-
created=int(time.time()),
206+
yield _build_chat_completion_chunk(
207+
chunk_id=chunk_id,
200208
model=model,
201-
object="chat.completion.chunk",
209+
delta={"tool_calls": [tc]},
202210
)
203211
else: # response.output_item.done
204212
if not tool_args_delta_seen.get(out_idx, False):
@@ -215,18 +223,10 @@ def iter_response_events_to_chat_chunks(
215223
tc["type"] = "function"
216224
tc["function"]["name"] = _get(item, "name", "")
217225
chunk_id = _get(item, "id", response_id)
218-
yield ChatCompletionChunk.construct(
219-
id=chunk_id,
220-
choices=[
221-
{
222-
"index": 0,
223-
"delta": {"tool_calls": [tc]},
224-
"finish_reason": None,
225-
}
226-
],
227-
created=int(time.time()),
226+
yield _build_chat_completion_chunk(
227+
chunk_id=chunk_id,
228228
model=model,
229-
object="chat.completion.chunk",
229+
delta={"tool_calls": [tc]},
230230
)
231231
continue
232232

@@ -235,7 +235,7 @@ def iter_response_events_to_chat_chunks(
235235
"response.function_call_arguments.delta",
236236
"response.output_item.function_call_arguments.delta",
237237
):
238-
saw_tool_call = True
238+
has_tool_call = True
239239
out_idx = int(_get(event, "output_index", 0))
240240
mapped_idx = tool_idx_map.setdefault(out_idx, len(tool_idx_map))
241241
tool_args_delta_seen[out_idx] = True
@@ -245,18 +245,10 @@ def iter_response_events_to_chat_chunks(
245245
"function": {"arguments": delta},
246246
}
247247
chunk_id = _get(event, "item_id", response_id)
248-
yield ChatCompletionChunk.construct(
249-
id=chunk_id,
250-
choices=[
251-
{
252-
"index": 0,
253-
"delta": {"tool_calls": [tc]},
254-
"finish_reason": None,
255-
}
256-
],
257-
created=int(time.time()),
248+
yield _build_chat_completion_chunk(
249+
chunk_id=chunk_id,
258250
model=model,
259-
object="chat.completion.chunk",
251+
delta={"tool_calls": [tc]},
260252
)
261253
continue
262254

@@ -267,32 +259,24 @@ def iter_response_events_to_chat_chunks(
267259
usage = _usage_to_openai(_get(resp, "usage"))
268260
if on_response_completed is not None and response_id:
269261
on_response_completed(response_id)
270-
finish_reason = "tool_calls" if saw_tool_call else "stop"
271-
saw_finish_reason = True
272-
yield ChatCompletionChunk.construct(
273-
id=response_id,
274-
choices=[
275-
{
276-
"index": 0,
277-
"delta": {},
278-
"finish_reason": finish_reason,
279-
}
280-
],
281-
created=int(time.time()),
262+
finish_reason = "tool_calls" if has_tool_call else "stop"
263+
has_finish_reason = True
264+
yield _build_chat_completion_chunk(
265+
chunk_id=response_id,
282266
model=model,
283-
object="chat.completion.chunk",
267+
delta={},
268+
finish_reason=finish_reason,
284269
usage=usage,
285270
)
286271
continue
287272

288273
# Safety fallback for abnormal stream termination.
289-
if not saw_finish_reason:
290-
yield ChatCompletionChunk.construct(
291-
id=response_id,
292-
choices=[{"index": 0, "delta": {}, "finish_reason": "stop"}],
293-
created=int(time.time()),
274+
if not has_finish_reason:
275+
yield _build_chat_completion_chunk(
276+
chunk_id=response_id,
294277
model=model,
295-
object="chat.completion.chunk",
278+
delta={},
279+
finish_reason="stop",
296280
usage=usage,
297281
)
298282

@@ -302,8 +286,8 @@ async def aiter_response_events_to_chat_chunks(
302286
model: str,
303287
on_response_completed: Optional[Callable[[str], None]] = None,
304288
) -> AsyncGenerator[ChatCompletionChunk, None]:
305-
saw_tool_call = False
306-
saw_finish_reason = False
289+
has_tool_call = False
290+
has_finish_reason = False
307291
response_id = ""
308292
usage: Optional[Dict[str, int]] = None
309293
tool_idx_map: Dict[int, int] = {}
@@ -324,18 +308,10 @@ async def aiter_response_events_to_chat_chunks(
324308
delta = _get(event, "delta", "") or ""
325309
chunk_id = _get(event, "item_id", response_id)
326310
if delta:
327-
yield ChatCompletionChunk.construct(
328-
id=chunk_id,
329-
choices=[
330-
{
331-
"index": 0,
332-
"delta": {"content": delta},
333-
"finish_reason": None,
334-
}
335-
],
336-
created=int(time.time()),
311+
yield _build_chat_completion_chunk(
312+
chunk_id=chunk_id,
337313
model=model,
338-
object="chat.completion.chunk",
314+
delta={"content": delta},
339315
)
340316
continue
341317

@@ -345,7 +321,7 @@ async def aiter_response_events_to_chat_chunks(
345321
):
346322
item = _get(event, "item")
347323
if _get(item, "type") == "function_call":
348-
saw_tool_call = True
324+
has_tool_call = True
349325
out_idx = int(_get(event, "output_index", 0))
350326
mapped_idx = tool_idx_map.setdefault(
351327
out_idx, len(tool_idx_map)
@@ -363,18 +339,10 @@ async def aiter_response_events_to_chat_chunks(
363339
}
364340
tool_meta_emitted[out_idx] = True
365341
chunk_id = _get(item, "id", response_id)
366-
yield ChatCompletionChunk.construct(
367-
id=chunk_id,
368-
choices=[
369-
{
370-
"index": 0,
371-
"delta": {"tool_calls": [tc]},
372-
"finish_reason": None,
373-
}
374-
],
375-
created=int(time.time()),
342+
yield _build_chat_completion_chunk(
343+
chunk_id=chunk_id,
376344
model=model,
377-
object="chat.completion.chunk",
345+
delta={"tool_calls": [tc]},
378346
)
379347
else:
380348
if not tool_args_delta_seen.get(out_idx, False):
@@ -391,26 +359,18 @@ async def aiter_response_events_to_chat_chunks(
391359
tc["type"] = "function"
392360
tc["function"]["name"] = _get(item, "name", "")
393361
chunk_id = _get(item, "id", response_id)
394-
yield ChatCompletionChunk.construct(
395-
id=chunk_id,
396-
choices=[
397-
{
398-
"index": 0,
399-
"delta": {"tool_calls": [tc]},
400-
"finish_reason": None,
401-
}
402-
],
403-
created=int(time.time()),
362+
yield _build_chat_completion_chunk(
363+
chunk_id=chunk_id,
404364
model=model,
405-
object="chat.completion.chunk",
365+
delta={"tool_calls": [tc]},
406366
)
407367
continue
408368

409369
if event_type in (
410370
"response.function_call_arguments.delta",
411371
"response.output_item.function_call_arguments.delta",
412372
):
413-
saw_tool_call = True
373+
has_tool_call = True
414374
out_idx = int(_get(event, "output_index", 0))
415375
mapped_idx = tool_idx_map.setdefault(out_idx, len(tool_idx_map))
416376
tool_args_delta_seen[out_idx] = True
@@ -420,18 +380,10 @@ async def aiter_response_events_to_chat_chunks(
420380
"function": {"arguments": delta},
421381
}
422382
chunk_id = _get(event, "item_id", response_id)
423-
yield ChatCompletionChunk.construct(
424-
id=chunk_id,
425-
choices=[
426-
{
427-
"index": 0,
428-
"delta": {"tool_calls": [tc]},
429-
"finish_reason": None,
430-
}
431-
],
432-
created=int(time.time()),
383+
yield _build_chat_completion_chunk(
384+
chunk_id=chunk_id,
433385
model=model,
434-
object="chat.completion.chunk",
386+
delta={"tool_calls": [tc]},
435387
)
436388
continue
437389

@@ -442,30 +394,22 @@ async def aiter_response_events_to_chat_chunks(
442394
usage = _usage_to_openai(_get(resp, "usage"))
443395
if on_response_completed is not None and response_id:
444396
on_response_completed(response_id)
445-
finish_reason = "tool_calls" if saw_tool_call else "stop"
446-
saw_finish_reason = True
447-
yield ChatCompletionChunk.construct(
448-
id=response_id,
449-
choices=[
450-
{
451-
"index": 0,
452-
"delta": {},
453-
"finish_reason": finish_reason,
454-
}
455-
],
456-
created=int(time.time()),
397+
finish_reason = "tool_calls" if has_tool_call else "stop"
398+
has_finish_reason = True
399+
yield _build_chat_completion_chunk(
400+
chunk_id=response_id,
457401
model=model,
458-
object="chat.completion.chunk",
402+
delta={},
403+
finish_reason=finish_reason,
459404
usage=usage,
460405
)
461406
continue
462407

463-
if not saw_finish_reason:
464-
yield ChatCompletionChunk.construct(
465-
id=response_id,
466-
choices=[{"index": 0, "delta": {}, "finish_reason": "stop"}],
467-
created=int(time.time()),
408+
if not has_finish_reason:
409+
yield _build_chat_completion_chunk(
410+
chunk_id=response_id,
468411
model=model,
469-
object="chat.completion.chunk",
412+
delta={},
413+
finish_reason="stop",
470414
usage=usage,
471415
)

0 commit comments

Comments
 (0)