22
33from sodalis .config import load_companion_config
44from sodalis .paths import ensure_runtime_dirs , get_prompts_dir
5- from sodalis .llm import generate_response
5+ from sodalis .llm import generate_response , generate_with_tools
66from sodalis .db import init_db
77from sodalis .history import create_chat , delete_chat , list_chats , chat_exists , load_messages , save_message
88from sodalis .prompt_builder import build_prompt , load_prompt_layers
9+ from sodalis .tools import TOOL_SCHEMA_TEXT
910
1011
11- def run_chat (companion , system_prompt , companion_prompt , chat_id = None , resume_chat_id = None ):
12+ def run_chat (companion , system_prompt , companion_prompt , chat_id = None , resume_chat_id = None , use_tools = True ):
1213 print (f"Sodalis { companion ['name' ]} ({ companion ['model' ]} )" )
1314
15+ tools_cfg = companion .get ("tools" , {})
16+
1417 history = []
1518 chat_id = None
1619
@@ -45,7 +48,17 @@ def run_chat(companion, system_prompt, companion_prompt, chat_id = None, resume_
4548
4649 prompt = build_prompt (system_prompt , companion_prompt , history , user_input )
4750
48- response = generate_response (
51+ if use_tools and tools_cfg .get ("enabled" , True ):
52+ response = generate_with_tools (
53+ prompt ,
54+ companion ["model" ],
55+ companion ["temperature" ],
56+ max_rounds = tools_cfg .get ("max_rounds" , 3 ),
57+ timeout = tools_cfg .get ("timeout" , 10 ),
58+ max_output = tools_cfg .get ("max_output" , 4096 ),
59+ )
60+ else :
61+ response = generate_response (
4962 prompt ,
5063 companion ["model" ],
5164 companion ["temperature" ]
@@ -63,15 +76,26 @@ def run_chat(companion, system_prompt, companion_prompt, chat_id = None, resume_
6376 break
6477
6578
def run_once(companion, system_prompt, message, use_tools=True):
    """Generate and print a single one-shot response to *message* (no history).

    Tool-enabled generation is used only when both the caller allows it
    (``use_tools``) and the companion config does not disable it
    (``tools.enabled``, default True); otherwise plain generation is used.
    """
    tools_cfg = companion.get("tools", {})
    full_prompt = f"{system_prompt}\n\nUSER: {message}\nASSISTANT:"

    # Arguments common to both generation paths.
    gen_kwargs = {
        "prompt": full_prompt,
        "model": companion["model"],
        "temperature": companion["temperature"],
    }

    if use_tools and tools_cfg.get("enabled", True):
        # Tool-loop limits come from the companion's `tools` config,
        # falling back to the same defaults the interactive path uses.
        response = generate_with_tools(
            max_rounds=tools_cfg.get("max_rounds", 3),
            timeout=tools_cfg.get("timeout", 10),
            max_output=tools_cfg.get("max_output", 4096),
            **gen_kwargs,
        )
    else:
        response = generate_response(**gen_kwargs)

    print(response)
77101
@@ -142,13 +166,23 @@ def main() -> None:
142166 metavar = "CHAT_ID" ,
143167 help = "Delete a saved chat by ID" ,
144168 )
169+ parser .add_argument (
170+ "--no-tools" ,
171+ action = "store_true" ,
172+ help = "Disable tool use for this invocation" ,
173+ )
145174
146175 args = parser .parse_args ()
147176 ensure_runtime_dirs ()
148177
149178 companion = load_companion_config (args .companion )
150179 companion_name = companion .get ("name" , args .companion )
151180 system_prompt , companion_prompt = load_prompt_layers (companion )
181+
182+ use_tools = not args .no_tools
183+ tools_cfg = companion .get ("tools" , {})
184+ if use_tools and tools_cfg .get ("enabled" , True ):
185+ system_prompt = system_prompt + "\n \n " + TOOL_SCHEMA_TEXT
152186
153187 if args .history :
154188 chats = list_chats ()
@@ -171,9 +205,9 @@ def main() -> None:
171205
172206 if args .chat is not None :
173207 if args .chat == "new" :
174- run_chat (companion , system_prompt , companion_prompt )
208+ run_chat (companion , system_prompt , companion_prompt , use_tools = use_tools )
175209 else :
176- run_chat (companion , system_prompt , companion_prompt , resume_chat_id = int (args .chat ))
210+ run_chat (companion , system_prompt , companion_prompt , resume_chat_id = int (args .chat ), use_tools = use_tools )
177211
178212
179213if __name__ == "__main__" :
0 commit comments