Skip to content

Commit 0e3039a

Browse files
committed
reformat
1 parent 0ace0b4 commit 0e3039a

3 files changed

Lines changed: 13 additions & 7 deletions

File tree

app/cache.py

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -18,7 +18,9 @@ def __init__(self):
1818
Initialize the LLM cache.
1919
"""
2020
self.cache_dir = config.cache_config.cache_dir
21-
self.cache_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), self.cache_dir)
21+
self.cache_dir = os.path.join(
22+
os.path.dirname(os.path.dirname(__file__)), self.cache_dir
23+
)
2224
os.makedirs(self.cache_dir, exist_ok=True)
2325

2426
self.ttl = config.cache_config.ttl

app/config.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -56,12 +56,12 @@ class BrowserSettings(BaseModel):
5656
None, description="Proxy settings for the browser"
5757
)
5858

59+
5960
class CacheSettings(BaseModel):
6061
enabled: bool = Field(False, description="Whether to enable caching")
6162
ttl: int = Field(86400, description="Cache time-to-live in seconds")
62-
cache_dir: str = Field(
63-
"cache", description="Path to the cache directory"
64-
)
63+
cache_dir: str = Field("cache", description="Path to the cache directory")
64+
6565

6666
class AppConfig(BaseModel):
6767
llm: Dict[str, LLMSettings]
@@ -199,7 +199,7 @@ def browser_config(self) -> Optional[BrowserSettings]:
199199
@property
200200
def search_config(self) -> Optional[SearchSettings]:
201201
return self._config.search_config
202-
202+
203203
@property
204204
def cache_config(self) -> Optional[CacheSettings]:
205205
return self._config.cache_config

app/llm.py

Lines changed: 6 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -164,7 +164,9 @@ async def ask(
164164
params["max_completion_tokens"] = self.max_tokens
165165
else:
166166
params["max_tokens"] = self.max_tokens
167-
params["temperature"] = temperature if temperature is not None else self.temperature
167+
params["temperature"] = (
168+
temperature if temperature is not None else self.temperature
169+
)
168170

169171
if not stream:
170172
# Non-streaming request
@@ -292,7 +294,9 @@ async def ask_tool(
292294
params["max_completion_tokens"] = self.max_tokens
293295
else:
294296
params["max_tokens"] = self.max_tokens
295-
params["temperature"] = temperature if temperature is not None else self.temperature
297+
params["temperature"] = (
298+
temperature if temperature is not None else self.temperature
299+
)
296300

297301
response = await self.client.chat.completions.create(**params)
298302

0 commit comments

Comments (0)