|
2 | 2 |
|
3 | 3 | from sodalis.config import load_companion_config |
4 | 4 | from sodalis.paths import ensure_runtime_dirs, get_prompts_dir |
5 | | -from sodalis.llm import generate_response |
| 5 | +from sodalis.llm import generate_response, generate_with_tools |
| 6 | +from sodalis.db import init_db |
| 7 | +from sodalis.history import create_chat, delete_chat, list_chats, chat_exists, load_messages, save_message |
| 8 | +from sodalis.prompt_builder import build_prompt, load_prompt_layers |
| 9 | +from sodalis.tools import TOOL_SCHEMA_TEXT |
6 | 10 |
|
7 | 11 |
|
8 | | -def load_system_prompt(filename: str) -> str: |
9 | | - prompt_path = get_prompts_dir() / filename |
def run_chat(companion, system_prompt, companion_prompt, chat_id=None, resume_chat_id=None, use_tools=True):
    """Run an interactive REPL chat session with the companion.

    Args:
        companion: Companion config dict; reads "name", "model",
            "temperature", and optionally "tools".
        system_prompt: Base system prompt text.
        companion_prompt: Companion-specific prompt layer.
        chat_id: Existing chat ID to append messages to, or None to create
            a chat lazily on the first user message.
        resume_chat_id: Chat ID to resume; its stored messages are loaded
            into the prompt history. Takes precedence over chat_id.
        use_tools: Whether tool use is allowed this session (still gated by
            the companion's own tools config).
    """
    print(f"Sodalis {companion['name']} ({companion['model']})")

    tools_cfg = companion.get("tools", {})
    history = []
    # BUG FIX: the original unconditionally reset chat_id to None here,
    # silently discarding a caller-supplied chat_id.

    if resume_chat_id is not None:
        if not chat_exists(resume_chat_id):
            print(f"Chat {resume_chat_id} not found.")
            return

        chat_id = resume_chat_id
        history = db_messages_to_prompt_history(load_messages(chat_id))
        print(f"Resuming chat {chat_id}.")

    while True:
        # The broad try keeps Ctrl-C graceful both at the prompt and
        # during generation.
        try:
            user_input = input("> ").strip()

            if user_input.lower() in {"exit", "quit"}:
                print("Goodbye.")
                break

            if not user_input:
                continue

            # Create the chat row lazily so empty sessions leave no record.
            if chat_id is None:
                chat_id = create_chat(
                    companion=companion["name"],
                    model=companion["model"],
                )
            save_message(chat_id, "user", user_input)

            prompt = build_prompt(system_prompt, companion_prompt, history, user_input)

            if use_tools and tools_cfg.get("enabled", True):
                response = generate_with_tools(
                    prompt,
                    companion["model"],
                    companion["temperature"],
                    max_rounds=tools_cfg.get("max_rounds", 3),
                    timeout=tools_cfg.get("timeout", 10),
                    max_output=tools_cfg.get("max_output", 4096),
                )
            else:
                response = generate_response(
                    prompt,
                    companion["model"],
                    companion["temperature"],
                )

            print(response)

            save_message(chat_id, "assistant", response)

            history.append({"role": "user", "content": user_input})
            history.append({"role": "assistant", "content": response})

        except KeyboardInterrupt:
            print("\nExiting.")
            break
| 77 | + |
| 78 | + |
def run_once(companion, system_prompt, message, use_tools=True):
    """Send a single message to the companion and print the reply.

    No chat history is created or persisted in this mode.
    """
    tool_settings = companion.get("tools", {})
    full_prompt = f"{system_prompt}\n\nUSER: {message}\nASSISTANT:"

    tools_active = use_tools and tool_settings.get("enabled", True)

    if tools_active:
        reply = generate_with_tools(
            prompt=full_prompt,
            model=companion["model"],
            temperature=companion["temperature"],
            max_rounds=tool_settings.get("max_rounds", 3),
            timeout=tool_settings.get("timeout", 10),
            max_output=tool_settings.get("max_output", 4096),
        )
    else:
        reply = generate_response(
            prompt=full_prompt,
            model=companion["model"],
            temperature=companion["temperature"],
        )

    print(reply)
| 101 | + |
| 102 | + |
def print_history_table(chats):
    """Print saved chats as an aligned table: ID, model, title, created-at.

    Prints a placeholder message when there are no chats.
    """
    if not chats:
        print("No saved chats found.")
        return

    print(f"{'ID':<6} {'MODEL':<15} {'TITLE':<50} {'CREATED'}")
    print("-" * 95)

    for chat_id, model, title, created_at in chats:
        # Guard against NULL titles and clip long ones to the column width.
        clipped_title = (title or "")[:50]
        print(f"{chat_id:<6} {model:<15} {clipped_title:<50} {created_at}")
| 114 | + |
| 115 | + |
def confirm_delete(chat_id: int) -> bool:
    """Interactively confirm deletion of a chat; defaults to "no"."""
    reply = input(f"Are you sure you want to delete chat {chat_id}? [y/N]: ")
    return reply.strip().lower() in {"y", "yes"}
| 119 | + |
| 120 | + |
def db_messages_to_prompt_history(messages: tuple):
    """Convert DB (role, content) rows into prompt-history dicts.

    Maps stored roles to the uppercase names used by the prompt builder;
    any other role is skipped.
    """
    role_names = {"user": "USER", "assistant": "ASSISTANT"}
    return [
        {"role": role_names[role], "content": content}
        for role, content in messages
        if role in role_names
    ]
15 | 135 |
|
16 | 136 |
|
def main() -> None:
    """CLI entry point: parse arguments and dispatch to the right mode.

    Modes (in priority order): --history, --delete, --chat (interactive,
    new or resumed), then one-shot message. With no arguments, prints help.
    """
    init_db()

    parser = argparse.ArgumentParser(prog="sodalis")
    parser.add_argument(
        "message",
        nargs="?",
        help="Message to send to the companion",
    )
    parser.add_argument(
        "--companion",
        default="default",
        help="Name of the companion profile to load",
    )
    parser.add_argument(
        "--chat",
        nargs="?",
        const="new",
        metavar="CHAT_ID",
        help="Start a new chat or resume an existing chat by ID",
    )
    parser.add_argument(
        "--history",
        action="store_true",
        help="Show saved chat history",
    )
    parser.add_argument(
        "--delete",
        type=int,
        metavar="CHAT_ID",
        help="Delete a saved chat by ID",
    )
    parser.add_argument(
        "--no-tools",
        action="store_true",
        help="Disable tool use for this invocation",
    )

    args = parser.parse_args()
    ensure_runtime_dirs()

    # --history and --delete are pure DB operations; handle them before
    # loading any companion config or prompt files.
    if args.history:
        print_history_table(list_chats())
        return

    if args.delete is not None:
        if not confirm_delete(args.delete):
            print("Delete cancelled.")
            return

        if delete_chat(args.delete):
            print(f"Chat {args.delete} deleted.")
        else:
            print(f"Chat {args.delete} not found.")
        return

    companion = load_companion_config(args.companion)
    system_prompt, companion_prompt = load_prompt_layers(companion)

    use_tools = not args.no_tools
    tools_cfg = companion.get("tools", {})
    if use_tools and tools_cfg.get("enabled", True):
        # Advertise the tool-calling schema to the model.
        system_prompt = system_prompt + "\n\n" + TOOL_SCHEMA_TEXT

    if args.chat is not None:
        if args.chat == "new":
            run_chat(companion, system_prompt, companion_prompt, use_tools=use_tools)
        else:
            # BUG FIX: a non-numeric CHAT_ID previously crashed with an
            # unhandled ValueError; report a usage error instead.
            try:
                resume_id = int(args.chat)
            except ValueError:
                parser.error(f"--chat expects an integer chat ID, got {args.chat!r}")
            run_chat(
                companion,
                system_prompt,
                companion_prompt,
                resume_chat_id=resume_id,
                use_tools=use_tools,
            )
        return

    # BUG FIX: the original parsed the positional message but never used
    # it — run_once was defined yet unreachable. Restore one-shot mode.
    if args.message is not None:
        run_once(companion, system_prompt, args.message, use_tools=use_tools)
    else:
        parser.print_help()
|
# Script entry point: run the CLI only when executed directly.
if __name__ == "__main__":
    main()
0 commit comments