
Commit 421db81

chore: better async tool execution support (#3461)
1 parent 2561f5c commit 421db81

File tree: 8 files changed, +144 additions, -127 deletions

.github/ISSUE_TEMPLATE/bug_report.yml

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ body:
    attributes:
      label: What version of camel are you using?
      description: Run command `python3 -c 'print(__import__("camel").__version__)'` in your shell and paste the output here.
-     placeholder: E.g., 0.2.80a3
+     placeholder: E.g., 0.2.80
    validations:
      required: true

camel/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@

from camel.logger import disable_logging, enable_logging, set_log_level

-__version__ = '0.2.80a3'
+__version__ = '0.2.80'

__all__ = [
    '__version__',

camel/agents/chat_agent.py

Lines changed: 6 additions & 2 deletions
@@ -3914,7 +3914,8 @@ async def _aexecute_tool(
        else:
            # Fallback: synchronous call
-           result = tool(**args)
+           loop = asyncio.get_running_loop()
+           result = await loop.run_in_executor(None, lambda: tool(**args))

    except Exception as e:
        # Capture the error message to prevent framework crash

@@ -4741,7 +4742,10 @@ async def _aexecute_tool_from_stream_data(
        else:
            # Fallback: synchronous call
-           result = tool(**args)
+           loop = asyncio.get_running_loop()
+           result = await loop.run_in_executor(
+               None, lambda: tool(**args)
+           )

    # Create the tool response message
    func_msg = FunctionCallingMessage(
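
Both hunks swap the inline fallback `result = tool(**args)` for `loop.run_in_executor(...)`, so a synchronous tool now runs on a worker thread while the event loop keeps servicing other coroutines. A minimal, standalone sketch of that behavior using only the standard library (the slow_tool and heartbeat names are illustrative, not part of the CAMEL codebase):

import asyncio
import time


def slow_tool(x: int) -> int:
    # Stands in for a blocking synchronous tool.
    time.sleep(1)
    return x * 2


async def heartbeat() -> None:
    # Keeps ticking while the sync tool runs on a worker thread.
    for _ in range(4):
        print("event loop still responsive")
        await asyncio.sleep(0.25)


async def main() -> None:
    loop = asyncio.get_running_loop()
    # Same call shape as the diff: None selects the default executor.
    result, _ = await asyncio.gather(
        loop.run_in_executor(None, lambda: slow_tool(21)),
        heartbeat(),
    )
    print("tool result:", result)


asyncio.run(main())

With the old inline call, the tool would have executed on the loop thread itself and heartbeat() could not run until it returned.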

camel/toolkits/function_tool.py

Lines changed: 10 additions & 1 deletion
@@ -12,10 +12,12 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import ast
+import asyncio
import inspect
import logging
import textwrap
import warnings
+from concurrent.futures import ThreadPoolExecutor
from inspect import Parameter, getsource, signature
from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type

@@ -31,6 +33,9 @@
logger = logging.getLogger(__name__)

+# Shared thread pool for running sync tools without blocking the event loop
+_SYNC_TOOL_EXECUTOR = ThreadPoolExecutor(max_workers=64)
+

def _remove_a_key(d: Dict, remove_key: Any) -> None:
    r"""Remove a key from a dictionary recursively."""

@@ -500,7 +505,11 @@ async def async_call(self, *args: Any, **kwargs: Any) -> Any:
        if self.is_async:
            return await self.func(*args, **kwargs)
        else:
-           return self.func(*args, **kwargs)
+           # Run sync function in executor to avoid blocking event loop
+           loop = asyncio.get_running_loop()
+           return await loop.run_in_executor(
+               _SYNC_TOOL_EXECUTOR, lambda: self.func(*args, **kwargs)
+           )

    @property
    def is_async(self) -> bool:
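
async_call still awaits native coroutines directly, but synchronous functions are now dispatched to the shared _SYNC_TOOL_EXECUTOR (capped at 64 worker threads) instead of being called inline. A hedged sketch of the same wrapper pattern in isolation (ToolWrapper is an illustrative stand-in, not CAMEL's FunctionTool):

import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Callable

# Mirrors the module-level _SYNC_TOOL_EXECUTOR added in the diff above.
_EXECUTOR = ThreadPoolExecutor(max_workers=64)


class ToolWrapper:
    def __init__(self, func: Callable[..., Any]) -> None:
        self.func = func

    @property
    def is_async(self) -> bool:
        return asyncio.iscoroutinefunction(self.func)

    async def async_call(self, *args: Any, **kwargs: Any) -> Any:
        if self.is_async:
            return await self.func(*args, **kwargs)
        # Sync functions go to the shared pool so the loop stays free.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(
            _EXECUTOR, lambda: self.func(*args, **kwargs)
        )


async def main() -> None:
    tool = ToolWrapper(lambda a, b: a + b)
    print(await tool.async_call(2, 3))  # -> 5


asyncio.run(main())

A fixed-size shared pool bounds thread growth when many sync tools run at once, rather than relying on the loop's default executor.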

camel/types/enums.py

Lines changed: 9 additions & 4 deletions
@@ -49,7 +49,7 @@ class ModelType(UnifiedModelType, Enum):
    O4_MINI = "o4-mini"
    O3 = "o3"
    O3_PRO = "o3-pro"
-   GPT_5_1_Instant = "gpt-5.1"
+   GPT_5_1 = "gpt-5.1"
    GPT_5 = "gpt-5"
    GPT_5_MINI = "gpt-5-mini"
    GPT_5_NANO = "gpt-5-nano"

@@ -603,6 +603,7 @@ def is_openai(self) -> bool:
            ModelType.GPT_5_NANO,
            ModelType.O4_MINI,
            ModelType.O3,
+           ModelType.GPT_5_1,
        }

    @property

@@ -1393,9 +1394,6 @@ def token_limit(self) -> int:
            ModelType.O1_PREVIEW,
            ModelType.O1_MINI,
            ModelType.GPT_4_5_PREVIEW,
-           ModelType.GPT_5,
-           ModelType.GPT_5_NANO,
-           ModelType.GPT_5_MINI,
            ModelType.MISTRAL_LARGE,
            ModelType.MISTRAL_NEMO,
            ModelType.MISTRAL_PIXTRAL_12B,

@@ -1561,6 +1559,13 @@ def token_limit(self) -> int:
            ModelType.NETMIND_LLAMA_4_SCOUT_17B_16E_INSTRUCT,
        }:
            return 320_000
+       elif self in {
+           ModelType.GPT_5_1,
+           ModelType.GPT_5_MINI,
+           ModelType.GPT_5_NANO,
+           ModelType.GPT_5,
+       }:
+           return 400_000
        elif self in {
            ModelType.OPENROUTER_LLAMA_4_SCOUT_FREE,
            ModelType.NETMIND_LLAMA_4_MAVERICK_17B_128E_INSTRUCT,
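
The GPT-5 entries leave their earlier token_limit bucket and join a new group, alongside the renamed GPT_5_1, that returns 400_000. A toy sketch of this enum-property bucketing pattern (ModelKind and its members are illustrative, not the real ModelType enum; the 128_000 default is an assumption for the example):

from enum import Enum


class ModelKind(Enum):
    GPT_5_1 = "gpt-5.1"
    GPT_5 = "gpt-5"
    OTHER = "other"

    @property
    def token_limit(self) -> int:
        # Set membership selects the context-window bucket.
        if self in {ModelKind.GPT_5_1, ModelKind.GPT_5}:
            return 400_000
        return 128_000


print(ModelKind.GPT_5.token_limit)  # 400000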

docs/conf.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@

project = 'CAMEL'
copyright = '2024, CAMEL-AI.org'
author = 'CAMEL-AI.org'
-release = '0.2.80a3'
+release = '0.2.80'

html_favicon = (
    'https://raw.githubusercontent.com/camel-ai/camel/master/misc/favicon.png'

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "camel-ai"
-version = "0.2.80a3"
+version = "0.2.80"
description = "Communicative Agents for AI Society Study"
authors = [{ name = "CAMEL-AI.org" }]
requires-python = ">=3.10,<3.15"

uv.lock

Lines changed: 115 additions & 116 deletions
Generated lockfile; diff not rendered by default.
