
Commit 197bd0a

docs(api)📝: improve and expand docstrings for main API and bots modules
- Expand module-level and function docstrings in scipyconference/__init__.py to clarify usage, parameters, and examples.
- Add detailed docstrings and structured Pydantic model documentation in scipyconference/bots.py for LLM-powered pun generation.
- Remove unused scipyconference/models.py file.
1 parent 8f9452e commit 197bd0a


3 files changed (+123 / -52 lines)


scipyconference/__init__.py

Lines changed: 58 additions & 14 deletions
@@ -1,10 +1,16 @@
-"""Top-level API for scipyconference.
+"""
+Top-level API for scipyconference.
 
-This is the file from which you can do:
+This module provides the main interface for generating SciPy conference puns.
+It supports both community-curated puns from a JSONL file and LLM-generated puns
+when the appropriate environment variables are set.
 
-    from scipyconference import some_function
+Examples::
 
-Use it to control the top-level API of your Python data science project.
+    from scipyconference import create_puns
+    create_puns(3)
+    create_puns(1, "pandas dataframes")
+    create_puns(np.inf)
 """
 
 import os
@@ -19,7 +25,17 @@
 
 
 def _read_puns_from_json():
-    """Read puns from the JSONL file."""
+    """
+    Read puns from the JSONL file.
+
+    Reads the community-curated puns from the puns.json file in JSONL format.
+    Each line should contain a JSON object with 'pun' and 'github_username' fields.
+
+    :return: List of dictionaries containing pun data.
+        Each dictionary has 'pun' and 'github_username' keys.
+        Returns empty list if file is not found or malformed.
+    :rtype: list
+    """
     pun_path = Path(__file__).parent / Path("puns.json")
     puns = []
     try:
@@ -37,7 +53,15 @@ def _read_puns_from_json():
 
 
 def _should_use_llm():
-    """Check if we should use LLM-generated puns based on environment variables."""
+    """
+    Check if we should use LLM-generated puns based on environment variables.
+
+    Determines whether to use LLM-generated puns by checking if any of the
+    PUNBOT_* environment variables are set.
+
+    :return: True if any PUNBOT_* environment variables are set, False otherwise.
+    :rtype: bool
+    """
     return any(
         [
            os.getenv("PUNBOT_API_KEY"),
@@ -48,9 +72,35 @@ def _should_use_llm():
 
 
 def create_puns(number: int, prompt: str = ""):
-    """Create `number` of puns for the SciPy conference."""
+    """
+    Create ``number`` of puns for the SciPy conference.
+
+    This function generates puns for the SciPy conference community. It can work
+    in three modes:
+
+    1. Community-curated puns (default): Reads from puns.json file
+    2. LLM-generated puns: Uses AI to generate puns based on a prompt
+    3. Party mode: Creates a celebration with random emojis when number is np.inf
+
+    :param int number: Number of puns to generate. Use np.inf for party mode.
+    :param str prompt: Prompt for LLM-generated puns. Only used when LLM mode is active.
+    :return: None
+    :rtype: None
+    :raises ImportError: If llamabot is required but not installed.
+    :notes:
+        The function automatically chooses between community-curated and LLM-generated
+        puns based on environment variables:
+        - If any of PUNBOT_API_KEY, PUNBOT_MODEL_NAME, or PUNBOT_API_BASE are set,
+          it will attempt to use LLM-generated puns
+        - Otherwise, it will use community-curated puns from puns.json
+
+    Examples::
+
+        create_puns(3)  # Generate 3 community-curated puns
+        create_puns(1, "pandas")  # Generate 1 LLM pun about pandas
+        create_puns(np.inf)  # Create a party celebration
+    """
     if number is np.inf:
-        # TODO: make this to do something really special
         party_emojis = [
            "🎉",
            "🎊",
@@ -78,30 +128,24 @@ def create_puns(number: int, prompt: str = ""):
         print(" ".join(random.sample(party_emojis, random.randint(5, 15))))
     else:
         if _should_use_llm():
-            # Use LLM-generated puns
             if punbot is None:
                 print("llamabot is required for LLM-generated puns.")
                 print("Install it with: pip install scipyconference[llm]")
                 print(
                     "Or use community-curated puns by not setting PUNBOT_* environment variables."  # noqa: E501
                 )
                 return
-
             for i in range(number):
                 print("🤖🐍:")
                 punbot(prompt)
         else:
-            # Use community-curated puns from JSONL file
             puns = _read_puns_from_json()
             if not puns:
                 print(
                     "No puns found in puns.json. Consider adding some community-curated puns!"  # noqa: E501
                 )
                 return
-
-            # Randomly sample puns, with replacement if needed
             selected_puns = random.choices(puns, k=number)
-
             for pun_data in selected_puns:
                 print()
                 print(f"@{pun_data['github_username']}: {pun_data['pun']}")
scipyconference/bots.py

Lines changed: 65 additions & 37 deletions
@@ -1,7 +1,14 @@
-"""Bots for the SciPy conference."""
+"""
+Bots for the SciPy conference.
+
+This module provides LLM-powered pun generation capabilities for the SciPy conference.
+It uses llamabot to create structured puns with emojis and explanations.
+"""
 
 import os
 
+from pydantic import BaseModel, Field
+
 try:
     import llamabot as lmb
 
@@ -10,52 +17,73 @@
     LLAMABOT_AVAILABLE = False
 
 
-def punbot_sysprompt():
-    """You are a witty, science-savvy language model specializing in generating clever
-    and contextually appropriate puns. Your audience is the SciPy conference community:
-    scientists, engineers, data scientists, and software developers who use and
-    contribute to open-source scientific Python tools. You understand the culture of the
-    conference: intellectually curious, collaborative, humorous, and fluent in Python
-    and the scientific computing stack.
-
-    Your puns should reference topics commonly discussed at SciPy, such as:
-
-    - Python libraries (e.g., NumPy, SciPy, pandas, matplotlib, scikit-learn, PyMC)
-    - Scientific computing concepts (e.g., optimization, linear algebra, FFTs,
-      simulations)
-    - Open-source software development practices
-    - Version control and CI/CD workflows
-    - Academic and research culture
-    - Conference life (e.g., poster sessions, lightning talks, coffee breaks)
-
-    Keep your puns light-hearted, nerdy, and ideally groan-worthy in a charming way.
-    You're allowed to use wordplay, homophones, technical double meanings, and mashups.
-    Avoid anything offensive, insensitive, or exclusionary.
-
-    Generate a pun or short one-liner that would make a SciPy attendee smile, chuckle,
-    or roll their eyes appreciatively.
+def _create_punbot():
     """
-    if LLAMABOT_AVAILABLE:
-        return lmb.prompt("system")(punbot_sysprompt.__doc__)
-    else:
-        return punbot_sysprompt.__doc__
-
+    Create and configure the punbot instance for LLM-generated puns.
 
-def _create_punbot():
-    """Create the punbot instance if llamabot is available."""
+    :raises ImportError: If llamabot is not available
+        and the user tries to use LLM features.
+    :return: A configured llamabot instance for generating structured puns.
+    :rtype: lmb.StructuredBot
+    """
     if not LLAMABOT_AVAILABLE:
         raise ImportError(
             "llamabot is required for LLM-generated puns. "
             "Install it with: pip install scipyconference[llm]"
         )
 
-    return lmb.SimpleBot(
-        system_prompt=punbot_sysprompt(),
+    @lmb.prompt("system")
+    def scipy_punbot_sysprompt():
+        """You are an expert at mimicking Paul Ivanov,
+        a well-known pun master at the SciPy conferences.
+
+        Paul will inject award-winning puns in response
+        to almost any theme that shows up during the lightning talks at SciPy.
+        Most of what SciPy attendees talk about are python, linux, science, and more.
+        Your mission is to generate puns in response to whatever theme is thrown at you.
+        The puns should be coherent.
+        """
+
+    class Pun(BaseModel):
+        """
+        Structured pun model with emoji, statement, and explanation.
+
+        This Pydantic model defines the structure for LLM-generated puns,
+        ensuring consistent output format with emoji, pun text, and explanation.
+
+        :ivar emoji: Single emoji that represents the theme or mood of the pun.
+        :vartype emoji: str
+        :ivar pun_statement: The pun itself,
+            with the pun core highlighted with italicization.
+        :vartype pun_statement: str
+        :ivar explanation: Explanation of why the pun works and what makes it funny.
+        :vartype explanation: str
+        """
+
+        emoji: str = Field(description="single emoji for the whole pun.")
+        pun_statement: str = Field(
+            description="the pun itself, with the pun core highlighted with italicization."  # noqa: E501
+        )
+        explanation: str = Field(description="Why the pun is a pun.")
+
+        def __str__(self):
+            """
+            String representation of the pun.
+
+            :return: The pun formatted as "emoji pun_statement".
+            :rtype: str
+            """
+            return f"{self.emoji} {self.pun_statement}"
+
+    bot = lmb.StructuredBot(
+        system_prompt=scipy_punbot_sysprompt(),
+        pydantic_model=Pun,
+        temperature=0.9,
+        api_key=os.getenv("PUNBOT_API_KEY", None),
+        api_base=os.getenv("PUNBOT_API_BASE", None),
         model_name=os.getenv("PUNBOT_MODEL_NAME", "gpt-4.1"),
-        api_base=os.getenv("PUNBOT_API_BASE", "https://api.openai.com/v1"),
-        api_key=os.getenv("PUNBOT_API_KEY", ""),
-        temperature=float(os.getenv("PUNBOT_TEMPERATURE", 2.7)),
     )
+    return bot
 
 
 # Create punbot instance only if llamabot is available
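
A hedged sketch of exercising the refactored bot directly, assuming llamabot is installed, the PUNBOT_* variables point at a reachable model endpoint (the values below are placeholders), and that StructuredBot returns an instance of the Pun model it was configured with:

    import os

    os.environ["PUNBOT_API_KEY"] = "your-api-key"  # placeholder credential
    os.environ["PUNBOT_MODEL_NAME"] = "gpt-4.1"  # default shown in the diff above

    from scipyconference.bots import _create_punbot

    bot = _create_punbot()  # raises ImportError when llamabot is missing
    pun = bot("lightning talks")  # assumed to return a Pun instance
    print(pun)  # __str__ renders "<emoji> <pun_statement>"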

scipyconference/models.py

Lines changed: 0 additions & 1 deletion
This file was deleted.
