Skip to content

Commit f521fe8

Browse files
Merge remote-tracking branch 'main/main' into dev/home_animation
2 parents 013461b + fa942bb commit f521fe8

14 files changed

Lines changed: 657 additions & 209 deletions

File tree

.prettierignore

Lines changed: 6 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,6 @@
1-
node_modules
2-
dist
3-
build
4-
*.min.js
5-
*.min.css
6-
coverage
7-
.next
8-
.nuxt
9-
.vuepress/dist
10-
.serverless
11-
.fusebox
12-
.dynamodb
13-
.tern-port
14-
*.log
15-
*.lock
16-
package-lock.json
17-
yarn.lock
18-
pnpm-lock.yaml
19-
__pycache__
20-
*.pyc
21-
.git
22-
.vscode
23-
.idea
1+
node_modules/**
2+
dist/**
3+
build/**
4+
.vitepress/**
5+
docs/.vitepress/cache/**
6+
docs/.vitepress/dist/**

eslint.config.mjs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ export default tseslint.config(
1515
'.next/**',
1616
'.nuxt/**',
1717
'.vuepress/dist/**',
18+
'.vitepress/**',
19+
'docs/.vitepress/cache/**',
20+
'docs/.vitepress/dist/**',
1821
'.serverless/**',
1922
'.fusebox/**',
2023
'.dynamodb/**',

packages/app/friday/args.py

Lines changed: 24 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,21 @@
11
# -*- coding: utf-8 -*-
2+
import json
23
from argparse import ArgumentParser, Namespace
34

45

6+
def json_type(value: str) -> dict:
    """Parse a JSON string into a dictionary (for use as an argparse ``type=``).

    Args:
        value: A JSON-encoded string that is expected to contain an object.

    Returns:
        The parsed dictionary; an empty dict when ``value`` is empty.

    Raises:
        ValueError: If ``value`` is not valid JSON, or is valid JSON but
            does not encode an object/dictionary. argparse converts this
            into a user-facing argument error.
    """
    # An empty/falsy string means "no kwargs supplied" rather than an error.
    # (The original also compared `value == ""`, which `not value` already covers.)
    if not value:
        return {}
    try:
        result = json.loads(value)
    except json.JSONDecodeError as e:
        # Chain the original decode error so the root cause stays visible.
        raise ValueError(f"Invalid JSON string: {e}") from e
    # Checked outside the `try`: json.JSONDecodeError is a ValueError subclass,
    # so keeping this raise out of the handler's scope avoids any ambiguity.
    if not isinstance(result, dict):
        raise ValueError("JSON must be an object/dictionary")
    return result
17+
18+
519
def get_args() -> Namespace:
620
"""Get the command line arguments for the script."""
721
parser = ArgumentParser(description="Arguments for friday")
@@ -36,9 +50,16 @@ def get_args() -> Namespace:
3650
required=True,
3751
)
3852
parser.add_argument(
39-
"--baseUrl",
40-
type=str,
41-
required=False,
53+
"--clientKwargs",
54+
type=json_type,
55+
default={},
56+
help="A JSON string representing a dictionary of keyword arguments to pass to the LLM client.",
57+
)
58+
parser.add_argument(
59+
"--generateKwargs",
60+
type=json_type,
61+
default={},
62+
help="A JSON string representing a dictionary of keyword arguments to pass to the LLM generate method.",
4263
)
4364
args = parser.parse_args()
4465
return args

packages/app/friday/main.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ async def main():
8989
)
9090

9191
# get model from args
92-
model = get_model(args.llmProvider, args.modelName, args.apiKey, args.baseUrl)
92+
model = get_model(args.llmProvider, args.modelName, args.apiKey, args.clientKwargs, args.generateKwargs)
9393
formatter = get_formatter(args.llmProvider)
9494

9595
# Create the ReAct agent
@@ -108,7 +108,6 @@ async def main():
108108
# Workflow Process
109109
1. Analyze user query and make a plan
110110
2. Carry out your plan step by step
111-
3. Call `{finish_function}` with your final response
112111
113112
# Response Guidelines
114113
- Be concise and focused on user's specific context
@@ -129,7 +128,6 @@ async def main():
129128
- Current date and time: {current_time}""".format(
130129
current_time=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
131130
max_turns=20,
132-
finish_function="generate_response",
133131
),
134132
model=model,
135133
formatter=formatter,

packages/app/friday/model.py

Lines changed: 54 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
# -*- coding: utf-8 -*-
22
"""Get the formatter and model based on the model provider."""
3+
import re
4+
import agentscope
35
from agentscope.formatter import (
46
DashScopeChatFormatter,
57
OpenAIChatFormatter,
@@ -18,6 +20,29 @@
1820
)
1921

2022

23+
def is_agentscope_version_ge(target_version: tuple) -> bool:
    """
    Check whether the installed agentscope release is at least ``target_version``.

    Args:
        target_version: A tuple of (major, minor, patch) version numbers.

    Returns:
        True when the parsed current version is >= ``target_version``;
        False when it is lower or the version string cannot be parsed.

    Example:
        >>> is_agentscope_version_ge((1, 0, 9))  # Works with "1.0.9" or "1.0.9dev"
        True
    """
    # Grab the leading "major.minor.patch" digits; trailing suffixes such as
    # "dev" or "rc1" are deliberately ignored by the anchored pattern.
    matched = re.match(r"^(\d+)\.(\d+)\.(\d+)", agentscope.__version__)
    if matched is None:
        # Unparseable version string: conservatively treat it as "older".
        return False
    current = tuple(int(part) for part in matched.groups())
    return current >= target_version
44+
45+
2146
def get_formatter(llmProvider: str) -> FormatterBase:
2247
"""Get the formatter based on the model provider."""
2348
match llmProvider.lower():
@@ -36,7 +61,13 @@ def get_formatter(llmProvider: str) -> FormatterBase:
3661
f"Unsupported model provider: {llmProvider}. "
3762
)
3863

39-
def get_model(llmProvider:str, modelName: str, apiKey: str, baseUrl: str = None) -> ChatModelBase:
64+
def get_model(
65+
llmProvider: str,
66+
modelName: str,
67+
apiKey: str,
68+
client_kwargs: dict = {},
69+
generate_kwargs: dict = {},
70+
) -> ChatModelBase:
4071
"""Get the model instance based on the input arguments."""
4172

4273
match llmProvider.lower():
@@ -45,34 +76,47 @@ def get_model(llmProvider:str, modelName: str, apiKey: str, baseUrl: str = None)
4576
model_name=modelName,
4677
api_key=apiKey,
4778
stream=True,
79+
generate_kwargs=generate_kwargs,
4880
)
4981
case "openai":
50-
client_args = {}
51-
if baseUrl:
52-
client_args["base_url"] = baseUrl
5382
return OpenAIChatModel(
5483
model_name=modelName,
5584
api_key=apiKey,
5685
stream=True,
57-
client_args=client_args,
86+
client_kwargs=client_kwargs,
87+
generate_kwargs=generate_kwargs,
5888
)
5989
case "ollama":
60-
return OllamaChatModel(
61-
model_name=modelName,
62-
stream=True,
63-
host=baseUrl,
64-
)
90+
if is_agentscope_version_ge((1, 0, 9)):
91+
# For agentscope >= 1.0.9
92+
return OllamaChatModel(
93+
model_name=modelName,
94+
stream=True,
95+
client_kwargs=client_kwargs,
96+
generate_kwargs=generate_kwargs,
97+
)
98+
else:
99+
# For agentscope < 1.0.9
100+
return OllamaChatModel(
101+
model_name=modelName,
102+
stream=True,
103+
**client_kwargs,
104+
)
65105
case "gemini":
66106
return GeminiChatModel(
67107
model_name=modelName,
68108
api_key=apiKey,
69109
stream=True,
110+
client_kwargs=client_kwargs,
111+
generate_kwargs=generate_kwargs,
70112
)
71113
case "anthropic":
72114
return AnthropicChatModel(
73115
model_name=modelName,
74116
api_key=apiKey,
75117
stream=True,
118+
client_kwargs=client_kwargs,
119+
generate_kwargs=generate_kwargs,
76120
)
77121
case _:
78122
raise ValueError(

packages/client/src/i18n/en.json

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,12 +114,22 @@
114114
"help": {
115115
"friday": {
116116
"api-key": "The {{llmProvider}} API key, used to access the LLM API provider",
117+
"base-url": "The base URL for the LLM API provider",
118+
"base-url-placeholder": "e.g. https://api.openai.com/v1,http://localhost:11434",
119+
"client-kwargs": "The keyword arguments for the API client initialization, e.g. \"base_url\" for OpenAI client",
120+
"generate-kwargs": "The keyword arguments passed when actually calling the LLM API, e.g. \"temperature\", \"max_tokens\", etc.",
117121
"llm-provider": "Select the LLM API provider",
118122
"model-name": "The {{llmProvider}} model used to generate response",
119123
"python-env": "Absolute path to your Python env (3.10+), e.g. /usr/bin/python",
120124
"write-permission": "Whether allow Friday to modify your local files. If allowed, Friday will equip a tool to write text files",
121-
"base-url": "The base URL for the LLM API provider",
122-
"base-url-placeholder": "e.g. https://api.openai.com/v1,http://localhost:11434"
125+
"key-name": "Key Name",
126+
"missing-key-name": "Missing key name",
127+
"type-name": "Type Name",
128+
"missing-type-name": "Missing type name",
129+
"value-name": "Value Name",
130+
"missing-value-name": "Missing value name",
131+
"check-only-number": "Only numbers can be entered",
132+
"add-form-list-btn": "Add keyword argument"
123133
}
124134
},
125135
"hint": {

packages/client/src/i18n/zh.json

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,12 +114,22 @@
114114
"help": {
115115
"friday": {
116116
"api-key": "用于访问 {{llmProvider}} LLM API 的密钥",
117+
"base-url": "LLM API 提供商的基础 URL",
118+
"base-url-placeholder": "例如 https://api.openai.com/v1,http://localhost:11434",
119+
"client-kwargs": "API client 初始化的关键字参数,例如 OpenAI 客户端的 \"base_url\" 字段",
120+
"generate-kwargs": "实际调用 LLM API 时传入的关键字参数,例如 \"temperature\"\"max_tokens\"",
117121
"llm-provider": "选择 LLM API 提供商",
118122
"model-name": "用于生成回复的{{llmProvider}}模型",
119123
"python-env": "Python 环境的绝对路径(3.10+),例如 /usr/bin/python",
120124
"write-permission": "是否允许 Friday 修改你的本地文件,如果允许,Friday 将会装备一个写文件的工具",
121-
"base-url": "LLM API 提供商的基础 URL",
122-
"base-url-placeholder": "例如 https://api.openai.com/v1,http://localhost:11434"
125+
"key-name": "Key 名称",
126+
"missing-key-name": "缺少 Key 名称",
127+
"type-name": "类型名称",
128+
"missing-type-name": "缺少类型",
129+
"value-name": "值名称",
130+
"missing-value-name": "缺少值名称",
131+
"check-only-number": "只能输入数字",
132+
"add-form-list-btn": "添加关键字参数"
123133
}
124134
},
125135
"hint": {

0 commit comments

Comments
 (0)