Skip to content

Commit 43235f9

Browse files
Merge branch 'master' into feat/openai-compatible-server
2 parents 648661d + e6f6105 commit 43235f9

39 files changed

+4398
-1697
lines changed

.github/ISSUE_TEMPLATE/bug_report.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ body:
2626
attributes:
2727
label: What version of camel are you using?
2828
description: Run command `python3 -c 'print(__import__("camel").__version__)'` in your shell and paste the output here.
29-
placeholder: E.g., 0.2.79
29+
placeholder: E.g., 0.2.80a3
3030
validations:
3131
required: true
3232

camel/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414

1515
from camel.logger import disable_logging, enable_logging, set_log_level
1616

17-
__version__ = '0.2.79'
17+
__version__ = '0.2.80a3'
1818

1919
__all__ = [
2020
'__version__',

camel/agents/_types.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ class ToolCallRequest(BaseModel):
2727
tool_name: str
2828
args: Dict[str, Any]
2929
tool_call_id: str
30+
extra_content: Optional[Dict[str, Any]] = None
3031

3132

3233
class ModelResponse(BaseModel):

camel/agents/chat_agent.py

Lines changed: 240 additions & 74 deletions
Large diffs are not rendered by default.

camel/configs/deepseek_config.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -96,13 +96,12 @@ class DeepSeekConfig(BaseConfig):
9696
tool_choice: Optional[Union[dict[str, str], str]] = None
9797
logprobs: Optional[bool] = None
9898
top_logprobs: Optional[int] = None
99+
stream_options: Optional[dict[str, bool]] = None
99100

100101
def __init__(self, include_usage: bool = True, **kwargs):
102+
if kwargs.get("stream") and "stream_options" not in kwargs:
103+
kwargs["stream_options"] = {"include_usage": include_usage}
101104
super().__init__(**kwargs)
102-
# Only set stream_options when stream is True
103-
# Otherwise, it will raise error when calling the API
104-
if self.stream:
105-
self.stream_options = {"include_usage": include_usage}
106105

107106

108107
DEEPSEEK_API_PARAMS = {param for param in DeepSeekConfig.model_fields.keys()}

camel/messages/base.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,8 +71,10 @@ class BaseMessage:
7171
images associated with the message. (default: :obj:`auto`)
7272
video_detail (Literal["auto", "low", "high"]): Detail level of the
7373
videos associated with the message. (default: :obj:`auto`)
74-
parsed: Optional[Union[Type[BaseModel], dict]]: Optional object which
74+
parsed (Optional[Union[Type[BaseModel], dict]]): Optional object which
7575
is parsed from the content. (default: :obj:`None`)
76+
reasoning_content (Optional[str]): Optional reasoning trace associated
77+
with the message. (default: :obj:`None`)
7678
"""
7779

7880
role_name: str
@@ -85,6 +87,7 @@ class BaseMessage:
8587
image_detail: Literal["auto", "low", "high"] = "auto"
8688
video_detail: Literal["auto", "low", "high"] = "auto"
8789
parsed: Optional[Union[BaseModel, dict]] = None
90+
reasoning_content: Optional[str] = None
8891

8992
@classmethod
9093
def make_user_message(
@@ -219,6 +222,12 @@ def create_new_instance(self, content: str) -> "BaseMessage":
219222
role_type=self.role_type,
220223
meta_dict=self.meta_dict,
221224
content=content,
225+
video_bytes=self.video_bytes,
226+
image_list=self.image_list,
227+
image_detail=self.image_detail,
228+
video_detail=self.video_detail,
229+
parsed=self.parsed,
230+
reasoning_content=self.reasoning_content,
222231
)
223232

224233
def __add__(self, other: Any) -> Union["BaseMessage", Any]:

camel/messages/func_message.py

Lines changed: 20 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -54,13 +54,17 @@ class FunctionCallingMessage(BaseMessage):
5454
mask_output (Optional[bool]): Whether to return a sanitized placeholder
5555
instead of the raw tool output.
5656
(default: :obj:`False`)
57+
extra_content (Optional[Dict[str, Any]]): Additional content
58+
associated with the tool call.
59+
(default: :obj:`None`)
5760
"""
5861

5962
func_name: Optional[str] = None
6063
args: Optional[Dict] = None
6164
result: Optional[Any] = None
6265
tool_call_id: Optional[str] = None
6366
mask_output: Optional[bool] = False
67+
extra_content: Optional[Dict[str, Any]] = None
6468

6569
@classmethod
6670
def make_tool_message(
@@ -147,19 +151,23 @@ def to_openai_assistant_message(self) -> OpenAIAssistantMessage:
147151
" due to missing function name or arguments."
148152
)
149153

154+
tool_call = {
155+
"id": self.tool_call_id or "null",
156+
"type": "function",
157+
"function": {
158+
"name": self.func_name,
159+
"arguments": json.dumps(self.args, ensure_ascii=False),
160+
},
161+
}
162+
163+
# Include extra_content if available
164+
if self.extra_content is not None:
165+
tool_call["extra_content"] = self.extra_content
166+
150167
return {
151168
"role": "assistant",
152169
"content": self.content or "",
153-
"tool_calls": [
154-
{
155-
"id": self.tool_call_id or "null",
156-
"type": "function",
157-
"function": {
158-
"name": self.func_name,
159-
"arguments": json.dumps(self.args, ensure_ascii=False),
160-
},
161-
}
162-
],
170+
"tool_calls": [tool_call], # type: ignore[list-item]
163171
}
164172

165173
def to_openai_tool_message(self) -> OpenAIToolMessageParam:
@@ -203,4 +211,6 @@ def to_dict(self) -> Dict:
203211
if self.tool_call_id is not None:
204212
base["tool_call_id"] = self.tool_call_id
205213
base["mask_output"] = self.mask_output
214+
if self.extra_content is not None:
215+
base["extra_content"] = self.extra_content
206216
return base

camel/models/deepseek_model.py

Lines changed: 2 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -165,44 +165,6 @@ def _prepare_request(
165165

166166
return request_config
167167

168-
def _post_handle_response(
169-
self, response: ChatCompletion
170-
) -> ChatCompletion:
171-
r"""Handle reasoning content with <think> tags at the beginning."""
172-
if (
173-
self.model_type in [ModelType.DEEPSEEK_REASONER]
174-
and os.environ.get("GET_REASONING_CONTENT", "false").lower()
175-
== "true"
176-
):
177-
reasoning_content = response.choices[0].message.reasoning_content # type: ignore[attr-defined]
178-
combined_content = ( # type: ignore[operator]
179-
f"<think>\n{reasoning_content}\n</think>\n"
180-
if reasoning_content
181-
else ""
182-
) + response.choices[0].message.content
183-
184-
response = ChatCompletion.construct(
185-
id=response.id,
186-
choices=[
187-
dict(
188-
index=response.choices[0].index,
189-
message={
190-
"role": response.choices[0].message.role,
191-
"content": combined_content,
192-
"tool_calls": None,
193-
},
194-
finish_reason=response.choices[0].finish_reason
195-
if response.choices[0].finish_reason
196-
else None,
197-
)
198-
],
199-
created=response.created,
200-
model=response.model,
201-
object="chat.completion",
202-
usage=response.usage,
203-
)
204-
return response
205-
206168
@observe()
207169
def _run(
208170
self,
@@ -244,7 +206,7 @@ def _run(
244206
**request_config,
245207
)
246208

247-
return self._post_handle_response(response)
209+
return response
248210

249211
@observe()
250212
async def _arun(
@@ -286,4 +248,4 @@ async def _arun(
286248
**request_config,
287249
)
288250

289-
return self._post_handle_response(response)
251+
return response

camel/models/fish_audio_model.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,12 @@ def __init__(
4444
self._url = url or os.environ.get(
4545
"FISHAUDIO_API_BASE_URL", "https://api.fish.audio"
4646
)
47+
if self._api_key is None:
48+
raise ValueError(
49+
"API key is required for FishAudio. Please provide it via "
50+
"the 'api_key' parameter or set the 'FISHAUDIO_API_KEY' "
51+
"environment variable."
52+
)
4753
self.session = Session(apikey=self._api_key, base_url=self._url)
4854

4955
def text_to_speech(

0 commit comments

Comments (0)