11
11
from enum import Enum
12
12
from typing import List , Union , Dict , Any , Optional
13
13
14
+ from .exceptions import ToolhouseError
14
15
from .net .environment import Environment
15
16
from .services .tools import Tools
17
+ from .services .local_tools import LocalTools
16
18
from .models .Provider import Provider as ProviderModel
17
19
from .models .RunToolsRequest import RunToolsRequest
18
20
from .models .GetToolsRequest import GetToolsRequest
19
- from ._exceptions import ToolhouseError
20
21
try :
21
22
from .models .OpenAIStream import stream_to_chat_completion
22
23
except ImportError :
@@ -71,7 +72,12 @@ def __init__(self, access_token: Optional[str] = None,
71
72
self .metadata : Dict [str , Any ] = {}
72
73
self .set_base_url (environment .value if isinstance (
73
74
environment , Environment ) else environment )
75
+ self .local_tools : LocalTools = LocalTools ()
74
76
77
def register_local_tool(self, local_tool):
    """Register a tool to be executed locally (client-side).

    Delegates to the instance's LocalTools registry and returns
    whatever the registry's ``register_local_tool`` returns.
    """
    registry = self.local_tools
    return registry.register_local_tool(local_tool)
80
+
75
81
def set_metadata (self , key : str , value ) -> None :
76
82
"""
77
83
Sets User Metadata
@@ -159,24 +165,15 @@ def run_tools(self, response, append: bool = True, stream=False) -> List:
159
165
160
166
if tool_calls :
161
167
for tool in tool_calls :
162
- run_tool_request = RunToolsRequest (
163
- tool , self .provider , self .metadata )
164
- run_response = self .tools .run_tools (run_tool_request )
165
- messages .append (run_response .content )
166
-
167
- elif self .provider in ("openai_assistants" , ProviderModel .OPENAI_ASSISTANTS ):
168
- if 'submit_tool_outputs' not in response .required_action :
169
- return []
170
-
171
- submit_tool_outputs = response .required_action .submit_tool_outputs
172
- tool_calls = getattr (submit_tool_outputs , 'tool_calls' , None )
173
- if tool_calls :
174
- for tool in tool_calls :
175
- run_tool_request = RunToolsRequest (
176
- tool , self .provider , self .metadata )
177
- run_response = self .tools .run_tools (run_tool_request )
178
- messages .append (run_response .content )
179
-
168
+ if tool .function .name in self .local_tools .get_registered_tools ():
169
+ result = self .local_tools .run_tools (tool )
170
+ messages .append (result .model_dump ())
171
+ else :
172
+ run_tool_request = RunToolsRequest (
173
+ tool , self .provider , self .metadata )
174
+ run_response = self .tools .run_tools (run_tool_request )
175
+ messages .append (run_response .content )
176
+
180
177
elif self .provider in ("anthropic" , ProviderModel .ANTHROPIC ):
181
178
if response .stop_reason != 'tool_use' :
182
179
return []
@@ -186,11 +183,15 @@ def run_tools(self, response, append: bool = True, stream=False) -> List:
186
183
if tool .type == "tool_use" :
187
184
if stream :
188
185
tool = tool .model_dump ()
189
- run_tool_request = RunToolsRequest (
190
- tool , self .provider , self .metadata )
191
- run_response = self .tools .run_tools (run_tool_request )
192
- message ['content' ].append (run_response .content )
193
-
186
+ if tool .name in self .local_tools .get_registered_tools ():
187
+ result = self .local_tools .run_tools (tool )
188
+ message ['content' ].append (result .model_dump ())
189
+ else :
190
+ run_tool_request = RunToolsRequest (
191
+ tool , self .provider , self .metadata )
192
+ run_response = self .tools .run_tools (run_tool_request )
193
+ output = run_response .content
194
+ message ['content' ].append (output )
194
195
if message ['content' ]:
195
196
if append :
196
197
messages .append ({'role' : 'assistant' , 'content' : response .content })
0 commit comments