
Commit 64b4ec0

Merge pull request #1 from toolhouseai/feature/local_runner
Feature/local runner
2 parents 7bc8995 + 4e55457 commit 64b4ec0

15 files changed (+424 -27 lines)

examples/sample_run_local_tools.py (+64)

@@ -0,0 +1,64 @@
+"""Anthropic Sample"""
+import os
+from typing import List
+
+from dotenv import load_dotenv
+from anthropic import Anthropic
+from anthropic.types import TextBlock
+from toolhouse import Toolhouse, Provider
+
+load_dotenv()
+
+TOKEN = os.getenv("ANTHROPIC_KEY")
+TH_TOKEN = os.getenv("TOOLHOUSE_BEARER_TOKEN")
+
+local_tools = [
+    {
+        'name': 'hello',
+        'description': 'Returns a customized hello message from a given city to the user',
+        'input_schema': {
+            'type': 'object',
+            'properties': {
+                'city': {'type': 'string', 'description': 'The city where you are from'}
+            },
+            'required': ['city']
+        }
+    }
+]
+
+client = Anthropic(api_key=TOKEN)
+
+th = Toolhouse(access_token=TH_TOKEN, provider=Provider.ANTHROPIC)
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def whatever(city: str):
+    """Return a hello message from the given city."""
+    return f"Hello from {city}!!!"
+
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Can I get a hello from Rome?"
+}]
+
+response = client.messages.create(
+    model="claude-3-5-sonnet-20240620",
+    max_tokens=1024,
+    tools=th.get_tools() + local_tools,
+    messages=messages
+)
+
+messages += th.run_tools(response)
+
+response = client.messages.create(
+    model="claude-3-5-sonnet-20240620",
+    max_tokens=1024,
+    tools=th.get_tools() + local_tools,
+    messages=messages
+)
+if isinstance(response.content[0], TextBlock):
+    print(response.content[0].text)

(+60)

@@ -0,0 +1,60 @@
+"""OpenAI Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse
+load_dotenv()
+
+TOKEN = os.getenv("OPENAI_KEY")
+TH_TOKEN = os.getenv("TOOLHOUSE_BEARER_TOKEN")
+
+
+local_tools = [
+    {'type': 'function',
+     'function':
+        {
+            'name': 'hello',
+            'description': 'Returns a customized hello message from a given city to the user',
+            'parameters': {
+                'type': 'object',
+                'properties': {
+                    'city': {'type': 'string', 'description': 'The city where you are from'}
+                }},
+            'required': ['city']
+        }}]
+
+th = Toolhouse(access_token=TH_TOKEN, provider="openai")
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def whatever(city: str):
+    """Return a hello message from the given city."""
+    return f"Hello from {city}!!!"
+
+
+client = OpenAI(api_key=TOKEN)
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Can I get a hello from Rome?"
+}]
+
+response = client.chat.completions.create(
+    model='gpt-4o',
+    messages=messages,
+    tools=th.get_tools() + local_tools
+)
+
+messages += th.run_tools(response)
+
+response = client.chat.completions.create(
+    model="gpt-4o",
+    messages=messages,
+    tools=th.get_tools() + local_tools
+)
+
+print(response.choices[0].message.content)
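
Both sample scripts load their credentials with python-dotenv, so they expect a .env file next to the script. A minimal sketch of that file (not part of this commit) using the variable names the scripts read; the values are placeholders, not real keys:

# .env — placeholder values, substitute your own keys
ANTHROPIC_KEY=sk-ant-...
OPENAI_KEY=sk-...
TOOLHOUSE_BEARER_TOKEN=th-...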

pyproject.toml (+1 -1)

@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "toolhouse"
-version = "1.0.0"
+version = "1.1.0"
 license = {file = "LICENSE"}
 authors = [
     { name = "Toolhouse Technologies", email = "[email protected]" },

src/toolhouse/__init__.py (+1 -1)

@@ -1,7 +1,7 @@
 # This file was generated by liblab | https://liblab.com/
 # flake8: noqa
 from .sdk import Toolhouse
-from ._exceptions import ToolhouseError
+from .exceptions import ToolhouseError
 from .net.environment import Environment
 from .models.Provider import Provider
 from .models import stream_to_chat_completion, OpenAIStream
File renamed without changes.

src/toolhouse/models/RunLocalTools.py (+30)

@@ -0,0 +1,30 @@
+from typing import Literal
+from pydantic import BaseModel
+
+
+class AnthropicToolResponse(BaseModel):
+    """Represents the results of a tool call for Anthropic."""
+
+    tool_use_id: str
+    """The ID of the tool call."""
+
+    content: str
+    """Result of the tool call."""
+
+    type: Literal["tool_result"]
+    """The content block type; always "tool_result"."""
+
+
+class OpenAIToolResponse(BaseModel):
+    """Represents the results of a tool call for OpenAI."""
+
+    role: Literal['tool']
+
+    tool_call_id: str
+    """The ID of the tool call."""
+
+    name: str
+    """The name of the tool function that was called."""
+
+    content: str
+    """Result of the tool call."""
@@ -0,0 +1,4 @@
+"""Tool Configuration Models"""
+from .openai import Tools as OpenAITools
+from .anthropic import Tools as AnthropicTools
+from .toolhouse import Tools as ToolhouseTools
@@ -0,0 +1,30 @@
+from typing import Literal, List, Dict
+from pydantic import BaseModel
+
+
+ArgumentType = Literal["string", "number", "integer", "object", "array", "boolean"]
+
+
+class Property(BaseModel):
+    """Tool Property"""
+    type: ArgumentType
+    description: str
+
+
+class InputSchema(BaseModel):
+    """InputSchema"""
+    type: Literal['object']
+    properties: Dict[str, Property]
+    required: List[str]
+
+
+class Tool(BaseModel):
+    """Anthropic tool definition"""
+    name: str
+    description: str
+    input_schema: InputSchema
+
+
+class Tools(BaseModel):
+    """Collection of Anthropic tools"""
+    tools: List[Tool]
@@ -0,0 +1,36 @@
+from typing import Literal, List, Dict
+from pydantic import BaseModel
+
+
+ArgumentType = Literal["string", "number", "integer", "object", "array", "boolean"]
+
+
+class Property(BaseModel):
+    """Tool Property"""
+    type: ArgumentType
+    description: str
+
+
+class Parameters(BaseModel):
+    """Parameters"""
+    type: Literal["object"]
+    properties: Dict[str, Property]
+
+
+class Function(BaseModel):
+    """Function definition"""
+    name: str
+    description: str
+    parameters: Parameters
+    required: List[str]
+
+
+class Tool(BaseModel):
+    """OpenAI tool definition"""
+    type: Literal["function"]
+    function: Function
+
+
+class Tools(BaseModel):
+    """Collection of OpenAI tools"""
+    tools: List[Tool]
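
These models mirror the shape of the local_tools entry in the OpenAI example above (including required as a sibling of parameters), so a definition can be validated before it is handed to the client. A minimal sketch, not part of this commit; the import path is an assumption since the module's location isn't shown in this excerpt:

# Import path is an assumption; adjust to wherever this module lives in the package.
from toolhouse.models.Tools.openai import Tool, Tools

local_tool = Tool.model_validate({
    'type': 'function',
    'function': {
        'name': 'hello',
        'description': 'Returns a customized hello message from a given city to the user',
        'parameters': {
            'type': 'object',
            'properties': {
                'city': {'type': 'string', 'description': 'The city where you are from'}
            }
        },
        'required': ['city']
    }
})

print(Tools(tools=[local_tool]).model_dump())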
@@ -0,0 +1,41 @@
+from typing import Literal, List
+from decimal import Decimal
+from pydantic import BaseModel, Field, HttpUrl
+
+
+ArgumentType = Literal["string", "number", "integer", "object", "array", "boolean"]
+ArgumentFrom = Literal["llm", "user", "metadata"]
+ToolType = Literal["local", "remote"]
+
+
+class Argument(BaseModel):
+    """Tool Arguments"""
+    name: str
+    type: ArgumentType
+    source: ArgumentFrom
+    label: str
+    description: str
+    required: bool
+
+
+class ToolHouseTool(BaseModel):
+    """ToolHouse Tools Configuration"""
+    id: str
+    publisher: str
+    tool_type: ToolType = "remote"
+    logo: str = Field(pattern=r'^data:image\/svg\+xml;base64,')
+    title: str
+    category: str
+    short_description: str
+    long_description: str
+    price_per_execution: Decimal = Field(gt=0)
+    star_rating: Decimal = Field(ge=0, le=5)
+    executions: int
+    url: HttpUrl
+    description_for_model: str
+    arguments: List[Argument]
+
+
+class Tools(BaseModel):
+    """Collection of Toolhouse tools"""
+    tools: List[ToolHouseTool]
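
A ToolHouseTool describes a marketplace tool rather than a chat-completion tool: it carries publisher and pricing metadata alongside the argument schema. A minimal sketch of a valid instance, not part of this commit; every value is illustrative and the import path is an assumption:

# Import path is an assumption; adjust to wherever this module lives in the package.
from toolhouse.models.Tools.toolhouse import Argument, ToolHouseTool

tool = ToolHouseTool(
    id="hello",
    publisher="toolhouse",
    tool_type="remote",
    logo="data:image/svg+xml;base64,PHN2Zy8+",   # must match the SVG data-URI pattern
    title="Hello",
    category="demo",
    short_description="Say hello from a city.",
    long_description="Returns a customized hello message from a given city.",
    price_per_execution="0.01",                  # Decimal, must be > 0
    star_rating="4.5",                           # Decimal, 0-5
    executions=0,
    url="https://example.com/hello",
    description_for_model="Return a hello message for the given city.",
    arguments=[
        Argument(name="city", type="string", source="llm", label="City",
                 description="The city where you are from", required=True),
    ],
)

print(tool.model_dump())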

src/toolhouse/sdk.py (+25 -24)

@@ -11,12 +11,13 @@
 from enum import Enum
 from typing import List, Union, Dict, Any, Optional
 
+from .exceptions import ToolhouseError
 from .net.environment import Environment
 from .services.tools import Tools
+from .services.local_tools import LocalTools
 from .models.Provider import Provider as ProviderModel
 from .models.RunToolsRequest import RunToolsRequest
 from .models.GetToolsRequest import GetToolsRequest
-from ._exceptions import ToolhouseError
 try:
     from .models.OpenAIStream import stream_to_chat_completion
 except ImportError:
@@ -71,7 +72,12 @@ def __init__(self, access_token: Optional[str] = None,
         self.metadata: Dict[str, Any] = {}
         self.set_base_url(environment.value if isinstance(
             environment, Environment) else environment)
+        self.local_tools: LocalTools = LocalTools()
 
+    def register_local_tool(self, local_tool):
+        """Register Local Tools"""
+        return self.local_tools.register_local_tool(local_tool)
+
     def set_metadata(self, key: str, value) -> None:
         """
         Sets User Metadata
@@ -159,24 +165,15 @@ def run_tools(self, response, append: bool = True, stream=False) -> List:
 
             if tool_calls:
                 for tool in tool_calls:
-                    run_tool_request = RunToolsRequest(
-                        tool, self.provider, self.metadata)
-                    run_response = self.tools.run_tools(run_tool_request)
-                    messages.append(run_response.content)
-
-        elif self.provider in ("openai_assistants", ProviderModel.OPENAI_ASSISTANTS):
-            if 'submit_tool_outputs' not in response.required_action:
-                return []
-
-            submit_tool_outputs = response.required_action.submit_tool_outputs
-            tool_calls = getattr(submit_tool_outputs, 'tool_calls', None)
-            if tool_calls:
-                for tool in tool_calls:
-                    run_tool_request = RunToolsRequest(
-                        tool, self.provider, self.metadata)
-                    run_response = self.tools.run_tools(run_tool_request)
-                    messages.append(run_response.content)
-
+                    if tool.function.name in self.local_tools.get_registered_tools():
+                        result = self.local_tools.run_tools(tool)
+                        messages.append(result.model_dump())
+                    else:
+                        run_tool_request = RunToolsRequest(
+                            tool, self.provider, self.metadata)
+                        run_response = self.tools.run_tools(run_tool_request)
+                        messages.append(run_response.content)
+
         elif self.provider in ("anthropic", ProviderModel.ANTHROPIC):
             if response.stop_reason != 'tool_use':
                 return []
@@ -186,11 +183,15 @@ def run_tools(self, response, append: bool = True, stream=False) -> List:
                 if tool.type == "tool_use":
                     if stream:
                         tool = tool.model_dump()
-                    run_tool_request = RunToolsRequest(
-                        tool, self.provider, self.metadata)
-                    run_response = self.tools.run_tools(run_tool_request)
-                    message['content'].append(run_response.content)
-
+                    if tool.name in self.local_tools.get_registered_tools():
+                        result = self.local_tools.run_tools(tool)
+                        message['content'].append(result.model_dump())
+                    else:
+                        run_tool_request = RunToolsRequest(
+                            tool, self.provider, self.metadata)
+                        run_response = self.tools.run_tools(run_tool_request)
+                        output = run_response.content
+                        message['content'].append(output)
             if message['content']:
                 if append:
                     messages.append({'role': 'assistant', 'content': response.content})
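
run_tools now checks each tool call against the local registry before falling back to the remote Toolhouse service. The registry itself lives in the new src/toolhouse/services/local_tools.py, which is part of this commit but not shown in this excerpt; based only on how sdk.py calls it, a minimal sketch of such a service could look like this (the real implementation may differ):

import json
from typing import Callable, Dict, List


class LocalTools:
    """Registry and runner for locally defined tools (illustrative sketch only)."""

    def __init__(self) -> None:
        self.tools: Dict[str, Callable] = {}

    def register_local_tool(self, name: str) -> Callable:
        """Decorator that stores a callable under the given tool name."""
        def decorator(func: Callable) -> Callable:
            self.tools[name] = func
            return func
        return decorator

    def get_registered_tools(self) -> List[str]:
        """Names of every registered local tool."""
        return list(self.tools.keys())

    def run_tools(self, tool):
        """Run the registered callable with the arguments supplied by the model.

        OpenAI tool calls expose .function.name and .function.arguments (a JSON
        string); Anthropic tool_use blocks expose .name and .input. The real
        service wraps the result in the models from models/RunLocalTools.py.
        """
        if hasattr(tool, "function"):            # OpenAI-style tool call
            name = tool.function.name
            args = json.loads(tool.function.arguments or "{}")
        else:                                    # Anthropic-style tool_use block
            name = tool.name
            args = tool.input or {}
        return self.tools[name](**args)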

src/toolhouse/services/base.py (+2)

@@ -48,12 +48,14 @@ def __init__(self, access_token: str) -> None:
         """
         self._access_token = access_token
 
+    @classmethod
     def _pattern_matching(cls, value: str, pattern: str, variable_name: str):
         if re.match(r"{}".format(pattern), value):
             return value
         else:
             raise ValueError(f"Invalid value for {variable_name}: must match {pattern}")
 
+    @classmethod
     def _enum_matching(
         cls, value: Union[str, Enum], enum_values: List[str], variable_name: str
     ):
