@@ -6,6 +6,8 @@ defmodule LLMAgent.Providers.Anthropic do
   API calls, response parsing, and error handling specific to Anthropic's API.
   """
 
+  alias LLMAgent.Providers.OpenAI
+
   @doc """
   Makes an Anthropic chat completion API call.
@@ -19,44 +21,38 @@ defmodule LLMAgent.Providers.Anthropic do
   - `{:error, reason}` - On failure, returns the error reason
   """
   def completion(params) do
-    try do
-      api_key = Map.get(params, :api_key) || System.get_env("ANTHROPIC_API_KEY")
-
-      # If no API key, return error
-      if is_nil(api_key) do
-        {:error, "Missing Anthropic API key"}
-      else
-        # Extract request parameters
-        messages = Map.get(params, :messages, [])
-        tools = Map.get(params, :tools, [])
-        model = Map.get(params, :model, "claude-3-opus-20240229")
-
-        # Format request body
-        request_body = %{
-          model: model,
-          messages: format_messages(messages),
-          temperature: Map.get(params, :temperature, 0.7),
-          max_tokens: Map.get(params, :max_tokens, 1000)
-        }
+    api_key = Map.get(params, :api_key) || System.get_env("ANTHROPIC_API_KEY")
+
+    # If no API key, return error
+    if is_nil(api_key) do
+      {:error, "Missing Anthropic API key"}
+    else
+      # Extract request parameters
+      messages = Map.get(params, :messages, [])
+      tools = Map.get(params, :tools, [])
+      model = Map.get(params, :model, "claude-3-opus-20240229")
+
+      # Format request body
+      request_body = %{
+        model: model,
+        messages: format_messages(messages),
+        temperature: Map.get(params, :temperature, 0.7),
+        max_tokens: Map.get(params, :max_tokens, 1000)
+      }
 
-        # Add tools if provided
-        request_body =
-          if length(tools) > 0 do
-            Map.put(request_body, :tools, format_tools(tools))
-          else
-            request_body
-          end
+      # Add tools if provided
+      request_body =
+        if length(tools) > 0 do
+          Map.put(request_body, :tools, format_tools(tools))
+        else
+          request_body
+        end
 
-        # Mock Anthropic API response
-        response = mock_anthropic_response(request_body)
+      # Mock Anthropic API response
+      response = mock_anthropic_response(request_body)
 
-        # Parse response
-        parsed_response = parse_anthropic_response(response)
-        {:ok, parsed_response}
-      end
-    rescue
-      e ->
-        {:error, "Error processing Anthropic request: #{inspect(e)}"}
+      # Parse response
+      {:ok, parse_anthropic_response(response)}
     end
   end
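For orientation, here is a minimal usage sketch of the reworked `completion/1`, based only on the code in this hunk; the prompt and options are illustrative, and `ANTHROPIC_API_KEY` is assumed to be set in the environment (or passed explicitly as `:api_key`):

```elixir
# Illustrative call; the message content and options are made up for the example.
params = %{
  messages: [%{role: "user", content: "Summarize the plot of Hamlet in one sentence."}],
  model: "claude-3-opus-20240229",
  temperature: 0.5,
  max_tokens: 500
}

case LLMAgent.Providers.Anthropic.completion(params) do
  {:ok, %{content: content, tool_calls: tool_calls}} ->
    # content is the joined text parts; tool_calls is a (possibly empty) list
    IO.puts(content)
    IO.inspect(tool_calls, label: "tool calls")

  {:error, reason} ->
    # e.g. "Missing Anthropic API key" when no key is available
    IO.puts("Anthropic request failed: #{reason}")
end
```

Note that dropping the `try`/`rescue` means any exception raised while formatting the request or parsing the response now propagates to the caller instead of being converted into an `{:error, _}` tuple.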
@@ -93,7 +89,7 @@ defmodule LLMAgent.Providers.Anthropic do
     case provider do
       :openai ->
         # Delegate to OpenAI embedding
-        LLMAgent.Providers.OpenAI.embedding(params)
+        OpenAI.embedding(params)
 
       _ ->
         # Return mock embeddings if no valid provider is specified
@@ -186,60 +182,37 @@ defmodule LLMAgent.Providers.Anthropic do
   end
 
   defp parse_anthropic_response(response) do
-    # Extract the relevant parts of the Anthropic response
-
-    # Get the content parts
-    content_parts = get_in(response, [:content]) || []
-
-    # Check if the response contains tool calls
-    tool_calls = get_in(response, [:tool_use]) || []
-
-    if length(tool_calls) > 0 do
-      # Parse tool calls
-      parsed_tool_calls =
-        Enum.map(tool_calls, fn tool_call ->
+    case response do
+      %{content: content} = resp when is_list(content) ->
+        # Check whether the response contains tool calls
+        if Map.has_key?(resp, :tool_calls) and length(resp.tool_calls) > 0 do
+          # Response with tool calls
           %{
-            id: tool_call.id || "tool-#{System.unique_integer([:positive])}",
-            name: tool_call.name,
-            arguments: parse_tool_arguments(tool_call.input)
+            content: extract_text_content(content),
+            tool_calls: resp.tool_calls
           }
-        end)
-
-      # Extract text content
-      text_content = extract_text_content(content_parts)
+        else
+          # Plain text response
+          %{
+            content: extract_text_content(content),
+            tool_calls: []
+          }
+        end
 
-      %{content: text_content, tool_calls: parsed_tool_calls}
-    else
-      # Regular response, just extract the text
-      text_content = extract_text_content(content_parts)
-      %{content: text_content, tool_calls: []}
+      _ ->
+        # Handle other response shapes or errors
+        %{
+          content: "Unable to parse response",
+          tool_calls: []
+        }
     end
   end
 
   defp extract_text_content(content_parts) do
     # Combine all text parts into a single string
     content_parts
     |> Enum.filter(fn part -> part.type == "text" end)
-    |> Enum.map(fn part -> part.text end)
-    |> Enum.join("\n")
-  end
-
-  defp parse_tool_arguments(arguments) when is_map(arguments) do
-    # Arguments are already a map in Anthropic's format
-    arguments
-  end
-
-  defp parse_tool_arguments(arguments) when is_binary(arguments) do
-    # Parse JSON arguments if they're a string
-    case Jason.decode(arguments) do
-      {:ok, parsed} -> parsed
-      {:error, _} -> %{raw_arguments: arguments}
-    end
-  end
-
-  defp parse_tool_arguments(arguments) do
-    # Fallback for other formats
-    %{raw_arguments: inspect(arguments)}
+    |> Enum.map_join("\n", fn part -> part.text end)
   end
 
   # Mock implementations for when Anthropic module is not available
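The `Enum.map/2` plus `Enum.join/2` pipeline is collapsed into a single `Enum.map_join/3` call, which builds the same string without the intermediate list. A quick equivalence sketch with hypothetical content parts:

```elixir
parts = [
  %{type: "text", text: "Hello"},
  %{type: "tool_use", name: "get_current_weather"},
  %{type: "text", text: "world"}
]

filtered = Enum.filter(parts, fn part -> part.type == "text" end)

# Both expressions evaluate to "Hello\nworld"
filtered |> Enum.map(fn part -> part.text end) |> Enum.join("\n")
filtered |> Enum.map_join("\n", fn part -> part.text end)
```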
@@ -271,7 +244,16 @@ defmodule LLMAgent.Providers.Anthropic do
         usage: %{
           input_tokens: 100,
           output_tokens: 100
-        }
+        },
+        tool_calls: [
+          %{
+            id: "tool_use_mock-id",
+            name: "get_current_weather",
+            arguments: %{
+              location: "San Francisco, CA"
+            }
+          }
+        ]
       }
     else
       # Mock a standard response
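With the `tool_calls` entry added to the mock above, the tool-call branch of the new `parse_anthropic_response/1` is exercised. A rough sketch of the mapping, assuming a mock shaped like the hunks show (the text part here is invented for illustration):

```elixir
# Hypothetical mock response assembled from the fields visible in this diff.
mock = %{
  content: [%{type: "text", text: "Checking the weather now."}],
  usage: %{input_tokens: 100, output_tokens: 100},
  tool_calls: [
    %{
      id: "tool_use_mock-id",
      name: "get_current_weather",
      arguments: %{location: "San Francisco, CA"}
    }
  ]
}

# parse_anthropic_response/1 (a private function) would map this to:
# %{
#   content: "Checking the weather now.",
#   tool_calls: [
#     %{id: "tool_use_mock-id", name: "get_current_weather",
#       arguments: %{location: "San Francisco, CA"}}
#   ]
# }
```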