
Commit 773d889

Refactor timeout option naming for consistency; improve code readability and organization in LLMAgent module
1 parent: cdc2c7e

6 files changed: +242 -318 lines changed

lib/llm_agent.ex

+1 -1
@@ -110,7 +110,7 @@ defmodule LLMAgent do
   """
   def process(flow, state, message, options \\ []) do
     signal = LLMAgent.Signals.user_message(message)
-    timeout = Keyword.get(options, :timeout_ms, 30000)
+    timeout = Keyword.get(options, :timeout, 30_000)

     AgentForge.Flow.process_with_limits(flow, signal, state, timeout_ms: timeout)
   end
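
At the call site, the public option is now :timeout (still milliseconds), while the internal AgentForge option keeps its :timeout_ms name. A minimal usage sketch; the flow/state construction and the return shape are assumptions, since neither appears in this diff:

# `flow` and `state` would come from LLMAgent.Flows / LLMAgent.Store (hypothetical setup).
result =
  LLMAgent.process(flow, state, "Summarize the latest report",
    # renamed from :timeout_ms; forwarded internally as timeout_ms: to AgentForge
    timeout: 10_000
  )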

lib/llm_agent/flows.ex

+2 -2
@@ -7,7 +7,7 @@ defmodule LLMAgent.Flows do
   appropriate handlers, and configures initial state.
   """

-  alias LLMAgent.{Store, Handlers, Signals}
+  alias LLMAgent.{Handlers, Signals, Store}

   @doc """
   Creates a standard conversation flow with the given system prompt and tools.
@@ -82,7 +82,7 @@ defmodule LLMAgent.Flows do
   """
   def task_flow(task_definition, options \\ []) do
     # Extract options
-    _timeout_ms = Keyword.get(options, :timeout_ms, 60000)
+    _task_timeout = Keyword.get(options, :task_timeout, 60_000)

     # Create flow
     fn signal, state ->
lib/llm_agent/providers/anthropic.ex

+63 -81
@@ -6,6 +6,8 @@ defmodule LLMAgent.Providers.Anthropic do
   API calls, response parsing, and error handling specific to Anthropic's API.
   """

+  alias LLMAgent.Providers.OpenAI
+
   @doc """
   Makes an Anthropic chat completion API call.

@@ -19,44 +21,38 @@ defmodule LLMAgent.Providers.Anthropic do
     - `{:error, reason}` - On failure, returns the error reason
   """
   def completion(params) do
-    try do
-      api_key = Map.get(params, :api_key) || System.get_env("ANTHROPIC_API_KEY")
-
-      # If no API key, return error
-      if is_nil(api_key) do
-        {:error, "Missing Anthropic API key"}
-      else
-        # Extract request parameters
-        messages = Map.get(params, :messages, [])
-        tools = Map.get(params, :tools, [])
-        model = Map.get(params, :model, "claude-3-opus-20240229")
-
-        # Format request body
-        request_body = %{
-          model: model,
-          messages: format_messages(messages),
-          temperature: Map.get(params, :temperature, 0.7),
-          max_tokens: Map.get(params, :max_tokens, 1000)
-        }
+    api_key = Map.get(params, :api_key) || System.get_env("ANTHROPIC_API_KEY")
+
+    # If no API key, return error
+    if is_nil(api_key) do
+      {:error, "Missing Anthropic API key"}
+    else
+      # Extract request parameters
+      messages = Map.get(params, :messages, [])
+      tools = Map.get(params, :tools, [])
+      model = Map.get(params, :model, "claude-3-opus-20240229")
+
+      # Format request body
+      request_body = %{
+        model: model,
+        messages: format_messages(messages),
+        temperature: Map.get(params, :temperature, 0.7),
+        max_tokens: Map.get(params, :max_tokens, 1000)
+      }

-        # Add tools if provided
-        request_body =
-          if length(tools) > 0 do
-            Map.put(request_body, :tools, format_tools(tools))
-          else
-            request_body
-          end
+      # Add tools if provided
+      request_body =
+        if length(tools) > 0 do
+          Map.put(request_body, :tools, format_tools(tools))
+        else
+          request_body
+        end

-        # Mock Anthropic API response
-        response = mock_anthropic_response(request_body)
+      # Mock Anthropic API response
+      response = mock_anthropic_response(request_body)

-        # Parse response
-        parsed_response = parse_anthropic_response(response)
-        {:ok, parsed_response}
-      end
-    rescue
-      e ->
-        {:error, "Error processing Anthropic request: #{inspect(e)}"}
+      # Parse response
+      {:ok, parse_anthropic_response(response)}
     end
   end
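
With the try/rescue wrapper removed, the only {:error, _} path left in completion/1 is the missing API key; everything else flows through the mock response and the parser. A minimal call sketch, assuming the message shape expected by format_messages/1 (which is not shown in this diff):

params = %{
  api_key: "test-key",  # or rely on ANTHROPIC_API_KEY in the environment
  model: "claude-3-opus-20240229",
  messages: [%{role: "user", content: "Hello"}]  # assumed shape for format_messages/1
}

case LLMAgent.Providers.Anthropic.completion(params) do
  {:ok, %{content: text, tool_calls: tool_calls}} -> {text, tool_calls}
  {:error, reason} -> raise "Anthropic completion failed: #{reason}"
end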

@@ -93,7 +89,7 @@ defmodule LLMAgent.Providers.Anthropic do
     case provider do
       :openai ->
         # Delegate to OpenAI embedding
-        LLMAgent.Providers.OpenAI.embedding(params)
+        OpenAI.embedding(params)

       _ ->
         # Return mock embeddings if no valid provider is specified
@@ -186,60 +182,37 @@ defmodule LLMAgent.Providers.Anthropic do
   end

   defp parse_anthropic_response(response) do
-    # Extract the relevant parts of the Anthropic response
-
-    # Get the content parts
-    content_parts = get_in(response, [:content]) || []
-
-    # Check if the response contains tool calls
-    tool_calls = get_in(response, [:tool_use]) || []
-
-    if length(tool_calls) > 0 do
-      # Parse tool calls
-      parsed_tool_calls =
-        Enum.map(tool_calls, fn tool_call ->
+    case response do
+      %{content: content} = resp when is_list(content) ->
+        # Check whether the response contains tool calls
+        if Map.has_key?(resp, :tool_calls) and length(resp.tool_calls) > 0 do
+          # Response that includes tool calls
           %{
-            id: tool_call.id || "tool-#{System.unique_integer([:positive])}",
-            name: tool_call.name,
-            arguments: parse_tool_arguments(tool_call.input)
+            content: extract_text_content(content),
+            tool_calls: resp.tool_calls
           }
-        end)
-
-      # Extract text content
-      text_content = extract_text_content(content_parts)
+        else
+          # Plain text response
+          %{
+            content: extract_text_content(content),
+            tool_calls: []
+          }
+        end

-      %{content: text_content, tool_calls: parsed_tool_calls}
-    else
-      # Regular response, just extract the text
-      text_content = extract_text_content(content_parts)
-      %{content: text_content, tool_calls: []}
+      _ ->
+        # Handle any other response shape or error
+        %{
+          content: "Unable to parse response",
+          tool_calls: []
+        }
     end
   end

   defp extract_text_content(content_parts) do
     # Combine all text parts into a single string
     content_parts
     |> Enum.filter(fn part -> part.type == "text" end)
-    |> Enum.map(fn part -> part.text end)
-    |> Enum.join("\n")
-  end
-
-  defp parse_tool_arguments(arguments) when is_map(arguments) do
-    # Arguments are already a map in Anthropic's format
-    arguments
-  end
-
-  defp parse_tool_arguments(arguments) when is_binary(arguments) do
-    # Parse JSON arguments if they're a string
-    case Jason.decode(arguments) do
-      {:ok, parsed} -> parsed
-      {:error, _} -> %{raw_arguments: arguments}
-    end
-  end
-
-  defp parse_tool_arguments(arguments) do
-    # Fallback for other formats
-    %{raw_arguments: inspect(arguments)}
+    |> Enum.map_join("\n", fn part -> part.text end)
   end

   # Mock implementations for when Anthropic module is not available
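
The two-step Enum.map/2 plus Enum.join/2 pipeline in extract_text_content/1 is collapsed into Enum.map_join/3, which builds the same string in a single pass; a quick equivalence check:

parts = [%{type: "text", text: "Hello"}, %{type: "text", text: "world"}]

parts |> Enum.map(fn part -> part.text end) |> Enum.join("\n")
#=> "Hello\nworld"

parts |> Enum.map_join("\n", fn part -> part.text end)
#=> "Hello\nworld"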
@@ -271,7 +244,16 @@ defmodule LLMAgent.Providers.Anthropic do
         usage: %{
           input_tokens: 100,
           output_tokens: 100
-        }
+        },
+        tool_calls: [
+          %{
+            id: "tool_use_mock-id",
+            name: "get_current_weather",
+            arguments: %{
+              location: "San Francisco, CA"
+            }
+          }
+        ]
       }
     else
       # Mock a standard response
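
Because the mock now includes a tool_calls entry, the tool-call branch of parse_anthropic_response/1 can be exercised end to end through completion/1. A hedged sketch, assuming the mock returns this shape whenever :tools is non-empty (as the surrounding if/else suggests) and that the tool map below is acceptable to format_tools/1:

params = %{
  api_key: "test-key",
  messages: [%{role: "user", content: "What's the weather in San Francisco?"}],
  tools: [%{name: "get_current_weather", description: "Look up current weather"}]  # assumed schema
}

{:ok, %{tool_calls: [call | _]}} = LLMAgent.Providers.Anthropic.completion(params)
# call.name      => "get_current_weather"
# call.arguments => %{location: "San Francisco, CA"}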
