Commit e39f088

邱梓咸 committed
Add SiliconFlow API test
1 parent 74e5e68 commit e39f088

File tree: 1 file changed, +59 -7 lines


tests/test_llms/test_gptstyleapi.py

+59 -7
@@ -1,4 +1,4 @@
-from lagent.llms import GPTStyleAPI
+from lagent.llms import GPTStyleAPI,GPTAPI
 
 def chat_xinfrence():
     api_base = 'http://192.168.26.213:13000/v1/chat/completions'  # oneapi
@@ -98,7 +98,8 @@ def chat_lmdeploy():
 
 def chat_oneapi():
     api_base = 'http://192.168.26.213:13000/v1/chat/completions'  # oneapi
-    model_name = "deepseek-r1-14b"
+    # model_name = "deepseek-r1-14b"
+    model_name = "Baichuan2-Turbo"
     gpttool = GPTStyleAPI(
         model_type=model_name,
         api_base=api_base,
@@ -120,6 +121,30 @@ def chat_oneapi():
         }])
     print(res)
 
+def chat_siliconflow():
+    api_base = 'https://api.siliconflow.cn/v1/chat/completions'  # siliconflow
+    # model_name = "deepseek-r1-14b"
+    model_name = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
+    gpttool = GPTStyleAPI(
+        model_type=model_name,
+        api_base=api_base,
+        key="sk-srirwcmjqmbmyttandxidrtmlfqpxcigyacoabutufvdkkgl",
+        retry=3,
+        meta_template=None,
+        max_new_tokens=512,
+        top_p=0.8,
+        top_k=40,
+        temperature=0.8,
+        repetition_penalty=1,
+        stream=False,
+        stop_words=None,
+    )
+    res = gpttool.chat(inputs=[
+        {
+            "role": "user",
+            "content": "世界第一高峰是"
+        }])
+    print(res)
 def stream_chat_ollama():
     api_base = 'http://192.168.26.212:11434/api/chat'  # ollama
     model_name = "qwen:7b"
@@ -134,7 +159,6 @@ def stream_chat_ollama():
         top_k=40,
         temperature=0.8,
         repetition_penalty=1,
-        stream=False,
         stop_words=None,
     )
     res = gpttool.stream_chat(inputs=[
@@ -147,7 +171,9 @@ def stream_chat_ollama():
 
 def stream_chat_oneapi():
     api_base = 'http://192.168.26.213:13000/v1/chat/completions'  # oneapi
-    model_name = "deepseek-r1-14b"
+    # model_name = "deepseek-r1-14b"
+    model_name = "Baichuan2-Turbo"
+    # model_name = "qwen:7b"
     gpttool = GPTStyleAPI(
         model_type=model_name,
         api_base=api_base,
@@ -159,7 +185,31 @@ def stream_chat_oneapi():
         top_k=40,
         temperature=0.8,
         repetition_penalty=1,
-        stream=False,
+        stop_words=None,
+    )
+    res = gpttool.stream_chat(inputs=[
+        {
+            "role": "user",
+            "content": "世界第一高峰是"
+        }])
+    for status, content, _ in res:
+        print(content, end='', flush=True)
+
+def stream_chat_siliconflow():
+    api_base = 'https://api.siliconflow.cn/v1/chat/completions'  # siliconflow
+    # model_name = "deepseek-r1-14b"
+    model_name = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
+    gpttool = GPTStyleAPI(
+        model_type=model_name,
+        api_base=api_base,
+        key="sk-srirwcmjqmbmyttandxidrtmlfqpxcigyacoabutufvdkkgl",
+        retry=3,
+        meta_template=None,
+        max_new_tokens=512,
+        top_p=0.8,
+        top_k=40,
+        temperature=0.8,
+        repetition_penalty=1,
         stop_words=None,
     )
     res = gpttool.stream_chat(inputs=[
@@ -175,8 +225,10 @@ def stream_chat_oneapi():
 # chat_direct()
 # chat_ollama()
 # chat_oneapi()
-chat_lmdeploy()
+# chat_lmdeploy()
+# chat_siliconflow()
 
 # # streaming output tests
 # stream_chat_ollama()
-# stream_chat_oneapi()
+# stream_chat_oneapi()
+stream_chat_siliconflow()
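
For reference, below is a minimal sketch of the raw HTTP call that the new chat_siliconflow() test effectively configures GPTStyleAPI to make, assuming the SiliconFlow endpoint follows the usual OpenAI-compatible chat-completions schema. The model name and sampling parameters mirror the test values; the key is a placeholder, and the requests-based client is only an illustration, not lagent's own transport.

import requests

# Hedged sketch: a direct OpenAI-compatible chat-completions request against
# SiliconFlow, approximating what the chat_siliconflow() test sends via GPTStyleAPI.
api_base = 'https://api.siliconflow.cn/v1/chat/completions'
api_key = 'sk-...'  # placeholder; use a real SiliconFlow key (avoid hard-coding it in tests)

payload = {
    'model': 'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B',
    'messages': [{'role': 'user', 'content': '世界第一高峰是'}],
    'max_tokens': 512,   # assumed mapping of GPTStyleAPI's max_new_tokens
    'top_p': 0.8,
    'temperature': 0.8,
    'stream': False,
}
resp = requests.post(
    api_base,
    headers={'Authorization': f'Bearer {api_key}'},
    json=payload,
    timeout=60,
)
# Non-streaming OpenAI-style responses carry the reply under choices[0].message.content.
print(resp.json()['choices'][0]['message']['content'])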
