Skip to content

Commit e139834

Browse files
authored
feat: make model choice optional for /ask command (#57)
1 parent e99216a commit e139834

File tree

2 files changed

+6
-33
lines changed

2 files changed

+6
-33
lines changed

src/discord_bot/bot.py

+6-25
Original file line numberDiff line numberDiff line change
@@ -26,17 +26,17 @@ async def on_message_create(event: interactions.api.events.MessageCreate) -> None:
 
 
 @interactions.slash_command(name="ask", description="Ask an LLM to answer anything")
+@interactions.slash_option(
+    name="prompt", description="Enter your prompt", required=True, opt_type=interactions.OptionType.STRING, min_length=10
+)
 @interactions.slash_option(
     name="model",
     description="Choose an LLM model",
-    required=True,
+    required=False,
     opt_type=interactions.OptionType.STRING,
-    autocomplete=True,
+    choices=[interactions.SlashCommandChoice(name=model, value=model) for model in MODEL_CHOICES],
 )
-@interactions.slash_option(
-    name="prompt", description="Enter your prompt", required=True, opt_type=interactions.OptionType.STRING, min_length=10
-)
-async def ask(ctx: interactions.SlashContext, model: str = "", prompt: str = "") -> None:
+async def ask(ctx: interactions.SlashContext, prompt: str = "", model: str = DEFAULT_MODEL) -> None:
     if model not in MODEL_CHOICES:
         await ctx.send(f"Invalid model `{model}`. Please choose from `{MODEL_CHOICES}`.")
         return
@@ -48,25 +48,6 @@ async def ask(ctx: interactions.SlashContext, model: str = "", prompt: str = "")
     await ctx.send(r)
 
 
-@ask.autocomplete("model")
-async def autocomplete(ctx: interactions.AutocompleteContext) -> None:
-    string_option_input = ctx.input_text
-    # you can use ctx.kwargs.get("name") to get the current state of other options - note they can be empty too
-    # make sure you respond within three seconds
-
-    filtered_choices = [choice for choice in MODEL_CHOICES if string_option_input in choice]
-
-    await ctx.send(
-        choices=[
-            {
-                "name": choice,
-                "value": choice,
-            }
-            for choice in filtered_choices
-        ]
-    )
-
-
 @interactions.slash_command(name="review-resume", description="Ask an LLM to review a resume")
 @interactions.slash_option(
     name="url",

tests/test_llm.py

-8
Original file line numberDiff line numberDiff line change
@@ -20,14 +20,6 @@ async def test_answer_question__LLM_should_response() -> None:
     assert not response[0].startswith("Error")
 
 
-@pytest.mark.asyncio
-async def test_answer_question__invalid_model() -> None:
-    invalid_model = "not-a-gpt"
-    response = await llm.answer_question(invalid_model, simple_prompt, AI_SERVER_URL)
-
-    assert response[0].startswith("Error")
-
-
 @pytest.mark.asyncio
 async def test_answer_concurrent_question__should_be_at_the_same_time() -> None:
     n_models = 2

0 commit comments

Comments (0)