Skip to content

Commit 25d5f81

Browse files
authored
feat(example): prompt for example in chat agent (#144)
1 parent 1779e04 commit 25d5f81

File tree

2 files changed

+54
-8
lines changed

2 files changed

+54
-8
lines changed

examples/openai_chat_agent/README.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,4 +11,6 @@ cp .env.example .env # set OPENAI_API_KEY or adjust OLLAMA_MODEL
1111
uv run app.py
1212
```
1313

14-
`config.json` points to the `shop_api` example so everything runs locally by default. If `OPENAI_API_KEY` is not provided the agent uses the model specified by `OLLAMA_MODEL` (default `llama3`). An Ollama server must be running when using this mode.
14+
Running `app.py` will display the available examples and let you choose which one to start. You can also provide `--example <name>` to skip the prompt.
15+
16+
If `OPENAI_API_KEY` is not provided the agent uses the model specified by `OLLAMA_MODEL` (default `llama3`). An Ollama server must be running when using this mode.

examples/openai_chat_agent/app.py

Lines changed: 51 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from __future__ import annotations
88

9+
import argparse
910
import asyncio
1011
import logging
1112
import os
@@ -23,7 +24,7 @@
2324
ErrorData,
2425
TextContent,
2526
)
26-
from mcp_use import MCPAgent, MCPClient, load_config_file
27+
from mcp_use import MCPAgent, MCPClient
2728
from packaging.version import Version
2829

2930
if TYPE_CHECKING: # pragma: no cover - only for type hints
@@ -35,6 +36,40 @@
3536
SYSTEM_MESSAGE = "You are a helpful assistant that talks to the user and uses tools via MCP."
3637

3738

39+
def list_available_examples() -> dict[str, str]:
    """Discover sibling example directories that ship an ``app.py``.

    Scans the parent directory of this example (skipping the chat agent
    itself) and returns a mapping of example name -> path to its app.py.
    """
    parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
    discovered: dict[str, str] = {}
    for name in os.listdir(parent_dir):
        # The chat agent is the launcher, not a launchable example.
        if name == "openai_chat_agent":
            continue
        candidate_dir = os.path.join(parent_dir, name)
        candidate_app = os.path.join(candidate_dir, "app.py")
        # Only directories containing an app.py count as runnable examples.
        if os.path.isdir(candidate_dir) and os.path.exists(candidate_app):
            discovered[name] = candidate_app
    return discovered
50+
51+
52+
def choose_example(examples: dict[str, str], preselected: str | None = None) -> str:
53+
"""Prompt the user to choose an example."""
54+
names = sorted(examples)
55+
if preselected and preselected in examples:
56+
return preselected
57+
58+
print("Available examples:")
59+
for idx, name in enumerate(names, 1):
60+
print(f" {idx}. {name}")
61+
62+
while True:
63+
choice = input("Select example by number or name: ").strip()
64+
if choice in examples:
65+
return choice
66+
if choice.isdigit():
67+
index = int(choice) - 1
68+
if 0 <= index < len(names):
69+
return names[index]
70+
print("Invalid selection, try again.")
71+
72+
3873
def make_sampling_callback(llm: ChatOpenAI | ChatOllama):
3974
async def sampling_callback(
4075
context: ClientSession, params: CreateMessageRequestParams
@@ -96,7 +131,19 @@ async def ensure_ollama_running(model: str) -> None:
96131
async def run_memory_chat() -> None:
97132
"""Run an interactive chat session with conversation memory enabled."""
98133
load_dotenv()
99-
config_file = os.path.join(os.path.dirname(__file__), "config.json")
134+
available_examples = list_available_examples()
135+
136+
parser = argparse.ArgumentParser(description="Interactive MCP Chat Agent")
137+
parser.add_argument(
138+
"--example",
139+
help="Example to run (default: prompt for selection)",
140+
)
141+
args = parser.parse_args()
142+
143+
example_name = choose_example(available_examples, args.example)
144+
server_path = available_examples[example_name]
145+
146+
config = {"mcpServers": {example_name: {"command": "python", "args": [server_path]}}}
100147

101148
openai_key = os.getenv("OPENAI_API_KEY")
102149
ollama_model = os.getenv("OLLAMA_MODEL", "llama3.2")
@@ -115,16 +162,13 @@ async def run_memory_chat() -> None:
115162
mcp_use_version = "0"
116163

117164
if Version(mcp_use_version) > Version("1.3.6"):
118-
client = MCPClient(
119-
load_config_file(config_file),
120-
sampling_callback=make_sampling_callback(llm),
121-
)
165+
client = MCPClient(config, sampling_callback=make_sampling_callback(llm))
122166
else:
123167
logger.warning(
124168
"mcp-use %s does not support sampling, install >1.3.6. Disabling sampling callback",
125169
mcp_use_version,
126170
)
127-
client = MCPClient(load_config_file(config_file))
171+
client = MCPClient(config)
128172

129173
agent = MCPAgent(
130174
llm=llm,

0 commit comments

Comments
 (0)