Skip to content

Commit bda6437

Browse files
authored
feat: Add Siliconflow API support (#96)
- Integrate Siliconflow API client
- Add authentication and API endpoints
- Implement basic API operations
1 parent b116a79 commit bda6437

File tree

3 files changed

+50
-4
lines changed

3 files changed

+50
-4
lines changed

.env.example

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,5 @@ ANTHROPIC_API_KEY=your_anthropic_api_key_here
33
DEEPSEEK_API_KEY=your_deepseek_api_key_here
44
GOOGLE_API_KEY=your_google_api_key_here
55
AZURE_OPENAI_API_KEY=your_azure_openai_api_key_here
6-
AZURE_OPENAI_MODEL_DEPLOYMENT=gpt-4o-ms
6+
AZURE_OPENAI_MODEL_DEPLOYMENT=gpt-4o-ms
7+
SILICONFLOW_API_KEY=your_siliconflow_api_key_here

tests/test_llm_api.py

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,14 +112,23 @@ def setUp(self):
112112
self.mock_gemini_model.generate_content.return_value = self.mock_gemini_response
113113
self.mock_gemini_client.GenerativeModel.return_value = self.mock_gemini_model
114114

115+
# Set up SiliconFlow-style response
116+
self.mock_siliconflow_response = MagicMock()
117+
self.mock_siliconflow_choice = MagicMock()
118+
self.mock_siliconflow_message = MagicMock()
119+
self.mock_siliconflow_message.content = "Test Siliconflow response"
120+
self.mock_siliconflow_choice.message = self.mock_siliconflow_message
121+
self.mock_siliconflow_response.choices = [self.mock_siliconflow_choice]
122+
115123
# Mock environment variables
116124
self.env_patcher = patch.dict('os.environ', {
117125
'OPENAI_API_KEY': 'test-openai-key',
118126
'DEEPSEEK_API_KEY': 'test-deepseek-key',
119127
'ANTHROPIC_API_KEY': 'test-anthropic-key',
120128
'GOOGLE_API_KEY': 'test-google-key',
121129
'AZURE_OPENAI_API_KEY': 'test-azure-key',
122-
'AZURE_OPENAI_MODEL_DEPLOYMENT': 'test-model-deployment'
130+
'AZURE_OPENAI_MODEL_DEPLOYMENT': 'test-model-deployment',
131+
'SILICONFLOW_API_KEY': 'test-siliconflow-key'
123132
})
124133
self.env_patcher.start()
125134

@@ -167,6 +176,17 @@ def test_create_deepseek_client(self, mock_openai):
167176
)
168177
self.assertEqual(client, self.mock_openai_client)
169178

179+
@unittest.skipIf(skip_llm_tests, skip_message)
@patch('tools.llm_api.OpenAI')
def test_create_siliconflow_client(self, mock_openai):
    """Verify create_llm_client("siliconflow") builds an OpenAI-compatible client.

    SiliconFlow reuses the OpenAI SDK, so the factory must construct the
    patched OpenAI class exactly once with the API key taken from the
    environment (patched to 'test-siliconflow-key' in setUp — TODO confirm)
    and the SiliconFlow base URL, then return that client unchanged.
    """
    mock_openai.return_value = self.mock_openai_client
    client = create_llm_client("siliconflow")
    # The factory must pass both credentials and the provider endpoint.
    mock_openai.assert_called_once_with(
        api_key='test-siliconflow-key',
        base_url="https://api.siliconflow.cn/v1"
    )
    # The constructed client is returned as-is, not wrapped.
    self.assertEqual(client, self.mock_openai_client)
189+
170190
@unittest.skipIf(skip_llm_tests, skip_message)
171191
@patch('tools.llm_api.Anthropic')
172192
def test_create_anthropic_client(self, mock_anthropic):
@@ -234,6 +254,19 @@ def test_query_deepseek(self, mock_create_client):
234254
temperature=0.7
235255
)
236256

257+
@unittest.skipIf(skip_llm_tests, skip_message)
@patch('tools.llm_api.create_llm_client')
def test_query_siliconflow(self, mock_create_client):
    """Verify query_llm routes a "siliconflow" query through the OpenAI-style API.

    Asserts that the provider default model is "deepseek-ai/DeepSeek-R1",
    that the prompt is wrapped in the OpenAI chat message structure, and
    that the mocked completion's message content is returned verbatim.
    """
    # Route the mocked client through the factory and give it a canned reply.
    self.mock_openai_client.chat.completions.create.return_value = self.mock_siliconflow_response
    mock_create_client.return_value = self.mock_openai_client
    response = query_llm("Test prompt", provider="siliconflow")
    # query_llm must surface the mocked choice's message content directly.
    self.assertEqual(response, "Test Siliconflow response")
    # Exactly one completion call, with the provider's default model and
    # the prompt wrapped as a single user text part.
    self.mock_openai_client.chat.completions.create.assert_called_once_with(
        model="deepseek-ai/DeepSeek-R1",
        messages=[{"role": "user", "content": [{"type": "text", "text": "Test prompt"}]}],
        temperature=0.7
    )
269+
237270
@unittest.skipIf(skip_llm_tests, skip_message)
238271
@patch('tools.llm_api.create_llm_client')
239272
def test_query_anthropic(self, mock_create_client):

tools/llm_api.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,14 @@ def create_llm_client(provider="openai"):
9090
api_key=api_key,
9191
base_url="https://api.deepseek.com/v1",
9292
)
93+
elif provider == "siliconflow":
94+
api_key = os.getenv('SILICONFLOW_API_KEY')
95+
if not api_key:
96+
raise ValueError("SILICONFLOW_API_KEY not found in environment variables")
97+
return OpenAI(
98+
api_key=api_key,
99+
base_url="https://api.siliconflow.cn/v1"
100+
)
93101
elif provider == "anthropic":
94102
api_key = os.getenv('ANTHROPIC_API_KEY')
95103
if not api_key:
@@ -137,14 +145,16 @@ def query_llm(prompt: str, client=None, model=None, provider="openai", image_pat
137145
model = os.getenv('AZURE_OPENAI_MODEL_DEPLOYMENT', 'gpt-4o-ms') # Get from env with fallback
138146
elif provider == "deepseek":
139147
model = "deepseek-chat"
148+
elif provider == "siliconflow":
149+
model = "deepseek-ai/DeepSeek-R1"
140150
elif provider == "anthropic":
141151
model = "claude-3-sonnet-20240229"
142152
elif provider == "gemini":
143153
model = "gemini-pro"
144154
elif provider == "local":
145155
model = "Qwen/Qwen2.5-32B-Instruct-AWQ"
146156

147-
if provider in ["openai", "local", "deepseek", "azure"]:
157+
if provider in ["openai", "local", "deepseek", "azure", "siliconflow"]:
148158
messages = [{"role": "user", "content": []}]
149159

150160
# Add text content
@@ -232,7 +242,7 @@ def query_llm(prompt: str, client=None, model=None, provider="openai", image_pat
232242
def main():
233243
parser = argparse.ArgumentParser(description='Query an LLM with a prompt')
234244
parser.add_argument('--prompt', type=str, help='The prompt to send to the LLM', required=True)
235-
parser.add_argument('--provider', choices=['openai','anthropic','gemini','local','deepseek','azure'], default='openai', help='The API provider to use')
245+
parser.add_argument('--provider', choices=['openai','anthropic','gemini','local','deepseek','azure','siliconflow'], default='openai', help='The API provider to use')
236246
parser.add_argument('--model', type=str, help='The model to use (default depends on provider)')
237247
parser.add_argument('--image', type=str, help='Path to an image file to attach to the prompt')
238248
args = parser.parse_args()
@@ -242,6 +252,8 @@ def main():
242252
args.model = "gpt-4o"
243253
elif args.provider == "deepseek":
244254
args.model = "deepseek-chat"
255+
elif args.provider == "siliconflow":
256+
args.model = "deepseek-ai/DeepSeek-R1"
245257
elif args.provider == 'anthropic':
246258
args.model = "claude-3-5-sonnet-20241022"
247259
elif args.provider == 'gemini':

0 commit comments

Comments (0)