
Commit e004e37

Merge branch 'master' into single_step_summary
2 parents: fb6a285 + d449281

File tree

10 files changed: +1479 −0 lines changed


camel/configs/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -22,6 +22,7 @@
 from .cometapi_config import COMETAPI_API_PARAMS, CometAPIConfig
 from .crynux_config import CRYNUX_API_PARAMS, CrynuxConfig
 from .deepseek_config import DEEPSEEK_API_PARAMS, DeepSeekConfig
+from .function_gemma_config import FUNCTION_GEMMA_API_PARAMS, FunctionGemmaConfig
 from .gemini_config import Gemini_API_PARAMS, GeminiConfig
 from .groq_config import GROQ_API_PARAMS, GroqConfig
 from .internlm_config import INTERNLM_API_PARAMS, InternLMConfig
@@ -136,4 +137,6 @@
     'QIANFAN_API_PARAMS',
     'CrynuxConfig',
     'CRYNUX_API_PARAMS',
+    'FunctionGemmaConfig',
+    'FUNCTION_GEMMA_API_PARAMS',
 ]
camel/configs/function_gemma_config.py

Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from __future__ import annotations

from typing import List, Optional

from camel.configs.base_config import BaseConfig


class FunctionGemmaConfig(BaseConfig):
    r"""Defines the parameters for generating completions using FunctionGemma
    via Ollama's native API.

    FunctionGemma uses a custom chat template format for function calling
    that differs from OpenAI's format. This config is used with Ollama's
    /api/generate endpoint.

    Reference: https://github.com/ollama/ollama/blob/main/docs/api.md

    Args:
        temperature (float, optional): Sampling temperature to use, between
            :obj:`0` and :obj:`2`. Higher values make the output more random,
            while lower values make it more focused and deterministic.
            (default: :obj:`None`)
        top_p (float, optional): An alternative to sampling with temperature,
            called nucleus sampling, where the model considers the results of
            the tokens with top_p probability mass. (default: :obj:`0.95`)
        top_k (int, optional): Limits the next token selection to the K most
            probable tokens. (default: :obj:`64`)
        num_predict (int, optional): Maximum number of tokens to generate.
            (default: :obj:`None`)
        stop (list, optional): Sequences where the model will stop generating
            further tokens. (default: :obj:`None`)
        seed (int, optional): Random seed for reproducibility.
            (default: :obj:`None`)
    """

    temperature: Optional[float] = None
    top_p: Optional[float] = 0.95
    top_k: Optional[int] = 64
    num_predict: Optional[int] = None
    stop: Optional[List[str]] = None
    seed: Optional[int] = None


FUNCTION_GEMMA_API_PARAMS = {
    param for param in FunctionGemmaConfig.model_fields.keys()
}
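
A minimal usage sketch (not part of this commit): since the diff itself reads FunctionGemmaConfig.model_fields, the config is a pydantic model, so its set fields can be dumped into an options dict for a request. The example values and the <end_of_turn> stop sequence below are illustrative assumptions, not from the PR.

from camel.configs import FUNCTION_GEMMA_API_PARAMS, FunctionGemmaConfig

# Hypothetical example values; fields left unset default to None and are
# dropped before building the request options.
config = FunctionGemmaConfig(
    temperature=0.2,
    num_predict=256,
    stop=["<end_of_turn>"],  # illustrative Gemma stop sequence, not from the diff
)

options = {
    key: value
    for key, value in config.model_dump().items()
    if key in FUNCTION_GEMMA_API_PARAMS and value is not None
}
# options keeps temperature, num_predict, and stop, plus the declared
# defaults top_p=0.95 and top_k=64.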

camel/models/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -25,6 +25,7 @@
 from .crynux_model import CrynuxModel
 from .deepseek_model import DeepSeekModel
 from .fish_audio_model import FishAudioModel
+from .function_gemma_model import FunctionGemmaModel
 from .gemini_model import GeminiModel
 from .groq_model import GroqModel
 from .internlm_model import InternLMModel
@@ -112,4 +113,5 @@
     'QianfanModel',
     'CrynuxModel',
     'AihubMixModel',
+    'FunctionGemmaModel',
 ]
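
Per the config docstring, these parameters target Ollama's /api/generate endpoint. A sketch of that request, assuming a local Ollama server on the default port and a placeholder model tag; the FunctionGemmaModel class registered above, and its function-calling chat-template handling, are not reproduced here.

import requests

from camel.configs import FunctionGemmaConfig

# Assumed local Ollama instance; "functiongemma" is a placeholder model tag.
config = FunctionGemmaConfig(temperature=0.2)
options = {k: v for k, v in config.model_dump().items() if v is not None}

response = requests.post(
    "http://localhost:11434/api/generate",
    json={
        "model": "functiongemma",
        "prompt": "What is the capital of France?",
        "stream": False,
        "options": options,
    },
    timeout=60,
)
print(response.json()["response"])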
