
Commit aac3fa2

feat: Add support for 01 model platform (#1093)
Co-authored-by: Wendong-Fan <[email protected]>
Co-authored-by: Wendong <[email protected]>
1 parent d176883 commit aac3fa2

21 files changed (+402, -108 lines)

camel/configs/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -28,6 +28,7 @@
 )
 from .togetherai_config import TOGETHERAI_API_PARAMS, TogetherAIConfig
 from .vllm_config import VLLM_API_PARAMS, VLLMConfig
+from .yi_config import YI_API_PARAMS, YiConfig
 from .zhipuai_config import ZHIPUAI_API_PARAMS, ZhipuAIConfig

 __all__ = [
@@ -58,4 +59,6 @@
     'SAMBA_CLOUD_API_PARAMS',
     'TogetherAIConfig',
     'TOGETHERAI_API_PARAMS',
+    'YiConfig',
+    'YI_API_PARAMS',
 ]

camel/configs/yi_config.py

Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from camel.configs.base_config import BaseConfig
+from camel.types import NOT_GIVEN, NotGiven
+
+
+class YiConfig(BaseConfig):
+    r"""Defines the parameters for generating chat completions using the
+    Yi API. You can refer to the following link for more details:
+    https://platform.lingyiwanwu.com/docs/api-reference
+
+    Args:
+        tool_choice (Union[dict[str, str], str], optional): Controls which (if
+            any) tool is called by the model. :obj:`"none"` means the model
+            will not call any tool and instead generates a message.
+            :obj:`"auto"` means the model can pick between generating a
+            message or calling one or more tools. :obj:`"required"` or
+            specifying a particular tool via
+            {"type": "function", "function": {"name": "some_function"}}
+            can be used to guide the model to use tools more strongly.
+            (default: :obj:`None`)
+        max_tokens (int, optional): Specifies the maximum number of tokens
+            the model can generate. This sets an upper limit, but does not
+            guarantee that this number will always be reached.
+            (default: :obj:`5000`)
+        top_p (float, optional): Controls the randomness of the generated
+            results. Lower values lead to less randomness, while higher
+            values increase randomness. (default: :obj:`0.9`)
+        temperature (float, optional): Controls the diversity and focus of
+            the generated results. Lower values make the output more focused,
+            while higher values make it more diverse. (default: :obj:`0.3`)
+        stream (bool, optional): If True, enables streaming output.
+            (default: :obj:`False`)
+    """
+
+    tool_choice: Optional[Union[dict[str, str], str]] = None
+    max_tokens: Union[int, NotGiven] = NOT_GIVEN
+    top_p: float = 0.9
+    temperature: float = 0.3
+    stream: bool = False
+
+
+YI_API_PARAMS = {param for param in YiConfig.model_fields.keys()}
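
A quick way to sanity-check the new config locally is to instantiate it and inspect the dictionary that is later passed to the OpenAI client. A minimal sketch (the field names and defaults are the ones declared above; as_dict() comes from BaseConfig):

from camel.configs import YI_API_PARAMS, YiConfig

# Override the default temperature of 0.3; everything else keeps its default.
config = YiConfig(temperature=0.1, stream=False)

# as_dict() is what YiModel later unpacks into the chat completion request.
print(config.as_dict())

# YI_API_PARAMS is the set of accepted parameter names; YiModel's
# check_model_config() rejects anything outside this set.
assert "temperature" in YI_API_PARAMS and "tool_choice" in YI_API_PARAMS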

camel/models/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -29,6 +29,7 @@
 from .stub_model import StubModel
 from .togetherai_model import TogetherAIModel
 from .vllm_model import VLLMModel
+from .yi_model import YiModel
 from .zhipuai_model import ZhipuAIModel

 __all__ = [
@@ -51,4 +52,5 @@
     'RekaModel',
     'SambaModel',
     'TogetherAIModel',
+    'YiModel',
 ]

camel/models/model_factory.py

Lines changed: 3 additions & 0 deletions
@@ -28,6 +28,7 @@
 from camel.models.stub_model import StubModel
 from camel.models.togetherai_model import TogetherAIModel
 from camel.models.vllm_model import VLLMModel
+from camel.models.yi_model import YiModel
 from camel.models.zhipuai_model import ZhipuAIModel
 from camel.types import ModelPlatformType, ModelType, UnifiedModelType
 from camel.utils import BaseTokenCounter
@@ -108,6 +109,8 @@ def create(
             model_class = RekaModel
         elif model_type == ModelType.STUB:
             model_class = StubModel
+        elif model_platform.is_yi and model_type.is_yi:
+            model_class = YiModel

         if model_class is None:
             raise ValueError(
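
With the factory branch above in place, a Yi backend is created the same way as the other platforms. A minimal sketch, assuming a valid YI_API_KEY is exported in the environment:

from camel.configs import YiConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

# Both the platform and the model type must be Yi for the new branch to match.
model = ModelFactory.create(
    model_platform=ModelPlatformType.YI,
    model_type=ModelType.YI_LIGHTNING,
    model_config_dict=YiConfig(temperature=0.3).as_dict(),
)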

camel/models/yi_model.py

Lines changed: 138 additions & 0 deletions
@@ -0,0 +1,138 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+
+import os
+from typing import Any, Dict, List, Optional, Union
+
+from openai import OpenAI, Stream
+
+from camel.configs import YI_API_PARAMS, YiConfig
+from camel.messages import OpenAIMessage
+from camel.models import BaseModelBackend
+from camel.types import (
+    ChatCompletion,
+    ChatCompletionChunk,
+    ModelType,
+)
+from camel.utils import (
+    BaseTokenCounter,
+    OpenAITokenCounter,
+    api_keys_required,
+)
+
+
+class YiModel(BaseModelBackend):
+    r"""Yi API in a unified BaseModelBackend interface.
+
+    Args:
+        model_type (Union[ModelType, str]): Model for which a backend is
+            created, one of Yi series.
+        model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
+            that will be fed into :obj:`openai.ChatCompletion.create()`. If
+            :obj:`None`, :obj:`YiConfig().as_dict()` will be used.
+            (default: :obj:`None`)
+        api_key (Optional[str], optional): The API key for authenticating with
+            the Yi service. (default: :obj:`None`)
+        url (Optional[str], optional): The url to the Yi service.
+            (default: :obj:`https://api.lingyiwanwu.com/v1`)
+        token_counter (Optional[BaseTokenCounter], optional): Token counter to
+            use for the model. If not provided, :obj:`OpenAITokenCounter(
+            ModelType.GPT_4O_MINI)` will be used.
+            (default: :obj:`None`)
+    """
+
+    def __init__(
+        self,
+        model_type: Union[ModelType, str],
+        model_config_dict: Optional[Dict[str, Any]] = None,
+        api_key: Optional[str] = None,
+        url: Optional[str] = None,
+        token_counter: Optional[BaseTokenCounter] = None,
+    ) -> None:
+        if model_config_dict is None:
+            model_config_dict = YiConfig().as_dict()
+        api_key = api_key or os.environ.get("YI_API_KEY")
+        url = url or os.environ.get(
+            "YI_API_BASE_URL", "https://api.lingyiwanwu.com/v1"
+        )
+        super().__init__(
+            model_type, model_config_dict, api_key, url, token_counter
+        )
+        self._client = OpenAI(
+            timeout=60,
+            max_retries=3,
+            api_key=self._api_key,
+            base_url=self._url,
+        )
+
+    @api_keys_required("YI_API_KEY")
+    def run(
+        self,
+        messages: List[OpenAIMessage],
+    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
+        r"""Runs inference of Yi chat completion.
+
+        Args:
+            messages (List[OpenAIMessage]): Message list with the chat history
+                in OpenAI API format.
+
+        Returns:
+            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
+                `ChatCompletion` in the non-stream mode, or
+                `Stream[ChatCompletionChunk]` in the stream mode.
+        """
+        response = self._client.chat.completions.create(
+            messages=messages,
+            model=self.model_type,
+            **self.model_config_dict,
+        )
+        return response
+
+    @property
+    def token_counter(self) -> BaseTokenCounter:
+        r"""Initialize the token counter for the model backend.
+
+        Returns:
+            OpenAITokenCounter: The token counter following the model's
+                tokenization style.
+        """
+
+        if not self._token_counter:
+            self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
+        return self._token_counter
+
+    def check_model_config(self):
+        r"""Check whether the model configuration contains any
+        unexpected arguments to Yi API.
+
+        Raises:
+            ValueError: If the model configuration dictionary contains any
+                unexpected arguments to Yi API.
+        """
+        for param in self.model_config_dict:
+            if param not in YI_API_PARAMS:
+                raise ValueError(
+                    f"Unexpected argument `{param}` is "
+                    "input into Yi model backend."
+                )
+
+    @property
+    def stream(self) -> bool:
+        r"""Returns whether the model is in stream mode, which sends partial
+        results each time.
+
+        Returns:
+            bool: Whether the model is in stream mode.
+        """
+        return self.model_config_dict.get('stream', False)
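
Because YiModel wraps the OpenAI SDK with a custom base_url, it can also be exercised directly, outside the factory. A rough sketch, assuming YI_API_KEY is set; the response follows the OpenAI ChatCompletion schema:

from camel.models import YiModel
from camel.types import ModelType

# model_config_dict defaults to YiConfig().as_dict(); the API key is read
# from the YI_API_KEY environment variable.
yi = YiModel(model_type=ModelType.YI_LIGHTNING)

# run() expects plain OpenAI-format chat messages.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Say hi to CAMEL AI."},
]

response = yi.run(messages)  # ChatCompletion, since stream defaults to False
print(response.choices[0].message.content)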

camel/types/enums.py

Lines changed: 45 additions & 0 deletions
@@ -87,6 +87,17 @@ class ModelType(UnifiedModelType, Enum):
     REKA_FLASH = "reka-flash"
     REKA_EDGE = "reka-edge"

+    # Yi models (01-ai)
+    YI_LIGHTNING = "yi-lightning"
+    YI_LARGE = "yi-large"
+    YI_MEDIUM = "yi-medium"
+    YI_LARGE_TURBO = "yi-large-turbo"
+    YI_VISION = "yi-vision"
+    YI_MEDIUM_200K = "yi-medium-200k"
+    YI_SPARK = "yi-spark"
+    YI_LARGE_RAG = "yi-large-rag"
+    YI_LARGE_FC = "yi-large-fc"
+
     def __str__(self):
         return self.value

@@ -220,6 +231,25 @@ def is_reka(self) -> bool:
             ModelType.REKA_FLASH,
         }

+    @property
+    def is_yi(self) -> bool:
+        r"""Returns whether this type of models is Yi model.
+
+        Returns:
+            bool: Whether this type of models is Yi.
+        """
+        return self in {
+            ModelType.YI_LIGHTNING,
+            ModelType.YI_LARGE,
+            ModelType.YI_MEDIUM,
+            ModelType.YI_LARGE_TURBO,
+            ModelType.YI_VISION,
+            ModelType.YI_MEDIUM_200K,
+            ModelType.YI_SPARK,
+            ModelType.YI_LARGE_RAG,
+            ModelType.YI_LARGE_FC,
+        }
+
     @property
     def token_limit(self) -> int:
         r"""Returns the maximum token limit for a given model.
@@ -249,13 +279,21 @@ def token_limit(self) -> int:
             return 8_192
         elif self in {
             ModelType.GPT_3_5_TURBO,
+            ModelType.YI_LIGHTNING,
+            ModelType.YI_MEDIUM,
+            ModelType.YI_LARGE_TURBO,
+            ModelType.YI_VISION,
+            ModelType.YI_SPARK,
+            ModelType.YI_LARGE_RAG,
         }:
             return 16_384
         elif self in {
             ModelType.MISTRAL_CODESTRAL,
             ModelType.MISTRAL_7B,
             ModelType.MISTRAL_MIXTRAL_8x7B,
             ModelType.GROQ_MIXTRAL_8_7B,
+            ModelType.YI_LARGE,
+            ModelType.YI_LARGE_FC,
         }:
             return 32_768
         elif self in {ModelType.MISTRAL_MIXTRAL_8x22B}:
@@ -290,6 +328,7 @@ def token_limit(self) -> int:
             ModelType.CLAUDE_3_SONNET,
             ModelType.CLAUDE_3_HAIKU,
             ModelType.CLAUDE_3_5_SONNET,
+            ModelType.YI_MEDIUM_200K,
         }:
             return 200_000
         elif self in {
@@ -445,6 +484,7 @@ class ModelPlatformType(Enum):
     TOGETHER = "together"
     OPENAI_COMPATIBLE_MODEL = "openai-compatible-model"
     SAMBA = "samba-nova"
+    YI = "lingyiwanwu"

     @property
     def is_openai(self) -> bool:
@@ -517,6 +557,11 @@ def is_samba(self) -> bool:
         r"""Returns whether this platform is Samba Nova."""
         return self is ModelPlatformType.SAMBA

+    @property
+    def is_yi(self) -> bool:
+        r"""Returns whether this platform is Yi."""
+        return self is ModelPlatformType.YI
+

 class AudioModelType(Enum):
     TTS_1 = "tts-1"
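
The new enum members and helpers can be checked without touching the network; a small illustrative snippet based on the properties added above:

from camel.types import ModelPlatformType, ModelType

assert ModelType.YI_LARGE.is_yi
assert ModelPlatformType.YI.is_yi

# Context windows as registered in token_limit above.
print(ModelType.YI_LIGHTNING.token_limit)    # 16384
print(ModelType.YI_LARGE.token_limit)        # 32768
print(ModelType.YI_MEDIUM_200K.token_limit)  # 200000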

camel/types/unified_model_type.py

Lines changed: 5 additions & 0 deletions
@@ -98,6 +98,11 @@ def is_reka(self) -> bool:
         r"""Returns whether the model is a Reka model."""
         return True

+    @property
+    def is_yi(self) -> bool:
+        r"""Returns whether the model is a Yi model."""
+        return True
+
     @property
     def support_native_tool_calling(self) -> bool:
         r"""Returns whether the model supports native tool calling."""

docs/key_modules/models.md

Lines changed: 9 additions & 0 deletions
@@ -41,6 +41,15 @@ The following table lists currently supported model platforms by CAMEL.
 | Anthropic | claude-2.0 | N |
 | Gemini | gemini-1.5-pro | Y |
 | Gemini | ggemini-1.5-flash | Y |
+| Lingyiwanwu | yi-lightning | N |
+| Lingyiwanwu | yi-large | N |
+| Lingyiwanwu | yi-medium | N |
+| Lingyiwanwu | yi-large-turbo | N |
+| Lingyiwanwu | yi-vision | Y |
+| Lingyiwanwu | yi-medium-200k | N |
+| Lingyiwanwu | yi-spark | N |
+| Lingyiwanwu | yi-large-rag | N |
+| Lingyiwanwu | yi-large-fc | N |
 | ZhipuAI | glm-4v | Y |
 | ZhipuAI | glm-4 | N |
 | ZhipuAI | glm-3-turbo | N |

examples/models/azure_openai_model_example.py

Lines changed: 3 additions & 10 deletions
@@ -13,7 +13,6 @@
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
 from camel.agents import ChatAgent
 from camel.configs import ChatGPTConfig
-from camel.messages import BaseMessage
 from camel.models import ModelFactory
 from camel.types import ModelPlatformType, ModelType

@@ -32,19 +31,13 @@
 )

 # Define system message
-sys_msg = BaseMessage.make_assistant_message(
-    role_name="Assistant",
-    content="You are a helpful assistant.",
-)
+sys_msg = "You are a helpful assistant."

 # Set agent
 camel_agent = ChatAgent(system_message=sys_msg, model=model)

-user_msg = BaseMessage.make_user_message(
-    role_name="User",
-    content="""Say hi to CAMEL AI, one open-source community dedicated to the
-    study of autonomous and communicative agents.""",
-)
+user_msg = """Say hi to CAMEL AI, one open-source community dedicated to the
+    study of autonomous and communicative agents."""

 # Get response information
 response = camel_agent.step(user_msg)
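
The same simplified ChatAgent pattern applies to the new Yi backend. A hypothetical end-to-end example (not a file in this diff), assuming YI_API_KEY is exported:

from camel.agents import ChatAgent
from camel.configs import YiConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

model = ModelFactory.create(
    model_platform=ModelPlatformType.YI,
    model_type=ModelType.YI_LIGHTNING,
    model_config_dict=YiConfig(temperature=0.2).as_dict(),
)

# Plain strings are accepted for both the system and user messages.
camel_agent = ChatAgent(system_message="You are a helpful assistant.", model=model)

response = camel_agent.step("Say hi to CAMEL AI.")
print(response.msgs[0].content)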
