Skip to content

Commit e363e3f

Browse files
committed
feat(prompts): add a central prompt module
1 parent 746fa91 commit e363e3f

File tree

5 files changed

+611
-0
lines changed

5 files changed

+611
-0
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
"""Public interface of the prompts package.

Re-exports the prompt library, the prompt service, and the semantic
router response schema so callers can import them from the package root.
"""

from .library import PromptLibrary
from .schemas import SemanticRouterResponse
from .service import PromptService

# Explicit public API of the package.
__all__ = ["PromptLibrary", "PromptService", "SemanticRouterResponse"]
Lines changed: 201 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,201 @@
1+
"""
2+
Prompt Library Module for Flare AI RAG
3+
4+
This module provides a centralized management system for AI prompts used throughout
the Flare AI RAG application. It handles the organization, storage, and retrieval
of various prompt templates used for different operations like semantic routing,
RAG query routing and responding, remote attestation, and user interactions.
8+
9+
The module implements a PromptLibrary class that maintains a collection of Prompt
10+
objects, each representing a specific type of interaction or operation template.
11+
Prompts are categorized for easy management and retrieval.
12+
"""
13+
14+
import structlog
15+
16+
from flare_ai_rag.prompts.schemas import (
17+
Prompt,
18+
RAGRouterResponse,
19+
SemanticRouterResponse,
20+
)
21+
from flare_ai_rag.prompts.templates import (
22+
CONVERSATIONAL,
23+
RAG_RESPONDER,
24+
RAG_ROUTER,
25+
REMOTE_ATTESTATION,
26+
SEMANTIC_ROUTER,
27+
)
28+
29+
logger = structlog.get_logger(__name__)
30+
31+
32+
class PromptLibrary:
    """
    A library for managing and organizing AI prompts used in the Flare AI RAG.

    This class serves as a central repository for all prompt templates used in
    the application. It provides functionality to add, retrieve, and categorize
    prompts for semantic routing, the RAG pipeline, remote attestation, and
    general conversation.

    Attributes:
        prompts (dict[str, Prompt]): Dictionary storing prompt objects
            with their names as keys.
    """

    def __init__(self) -> None:
        """
        Initialize a new PromptLibrary instance.

        Creates an empty prompt dictionary and populates it with default prompts
        through the _initialize_default_prompts method.
        """
        self.prompts: dict[str, Prompt] = {}
        self._initialize_default_prompts()

    def _initialize_default_prompts(self) -> None:
        """
        Initialize the library with a set of default prompts.

        Creates and adds the following default prompts:
        - semantic_router: For routing user queries to a handler
        - conversational: For general user interactions
        - rag_router: For classifying queries within the RAG pipeline
        - rag_responder: For answering queries with retrieved context
        - request_attestation: For remote attestation requests

        This method is called automatically during instance initialization.
        """
        default_prompts = [
            Prompt(
                name="semantic_router",
                description="Route user query based on user input",
                template=SEMANTIC_ROUTER,
                required_inputs=["user_input"],
                response_mime_type="text/x.enum",
                response_schema=SemanticRouterResponse,
                category="router",
            ),
            Prompt(
                name="conversational",
                description="Converse with a user",
                template=CONVERSATIONAL,
                required_inputs=["user_input"],
                response_schema=None,
                response_mime_type=None,
                category="conversational",
            ),
            Prompt(
                # FIX: description was the unfinished placeholder "The ".
                name="rag_router",
                description="Classify user queries for the RAG pipeline",
                template=RAG_ROUTER,
                required_inputs=["user_input"],
                response_mime_type="application/json",
                response_schema=RAGRouterResponse,
                category="rag-router",
            ),
            Prompt(
                # FIX: description was the unfinished placeholder "The ".
                name="rag_responder",
                description="Answer user queries using retrieved context",
                template=RAG_RESPONDER,
                required_inputs=["user_input"],
                response_schema=None,
                response_mime_type=None,
                # NOTE(review): category is "conversational" rather than a
                # dedicated "rag-responder" category — confirm this is intended.
                category="conversational",
            ),
            Prompt(
                name="request_attestation",
                description="User has requested a remote attestation",
                template=REMOTE_ATTESTATION,
                required_inputs=None,
                response_schema=None,
                response_mime_type=None,
                category="conversational",
            ),
        ]

        for prompt in default_prompts:
            self.add_prompt(prompt)

    def add_prompt(self, prompt: Prompt) -> None:
        """
        Add a new prompt to the library.

        Any existing prompt with the same name is overwritten.

        Args:
            prompt (Prompt): The prompt object to add to the library.

        Logs:
            Debug log entry when prompt is successfully added.

        Example:
            ```python
            custom_prompt = Prompt(name="custom", template="...", category="misc")
            library.add_prompt(custom_prompt)
            ```
        """
        self.prompts[prompt.name] = prompt
        logger.debug("prompt_added", name=prompt.name, category=prompt.category)

    def get_prompt(self, name: str) -> Prompt:
        """
        Retrieve a prompt by its name.

        Args:
            name (str): The name of the prompt to retrieve.

        Returns:
            Prompt: The requested prompt object.

        Raises:
            KeyError: If the prompt name doesn't exist in the library.

        Example:
            ```python
            try:
                prompt = library.get_prompt("semantic_router")
            except KeyError:
                print("Prompt not found")
            ```
        """
        if name not in self.prompts:
            logger.error("prompt_not_found", name=name)
            msg = f"Prompt '{name}' not found in library"
            raise KeyError(msg)
        return self.prompts[name]

    def get_prompts_by_category(self, category: str) -> list[Prompt]:
        """
        Get all prompts in a specific category.

        Args:
            category (str): The category to filter prompts by.

        Returns:
            list[Prompt]: A list of all prompts in the specified category.
        """
        return [
            prompt for prompt in self.prompts.values() if prompt.category == category
        ]

    def list_categories(self) -> list[str]:
        """
        List all available prompt categories.

        Returns:
            list[str]: A list of unique category names used in the library.
                Prompts whose category is None are excluded. Order is not
                guaranteed (built from a set).

        Example:
            ```python
            categories = library.list_categories()
            print("Available categories:", categories)
            ```
        """
        return list(
            {
                prompt.category
                for prompt in self.prompts.values()
                if prompt.category is not None
            }
        )
Lines changed: 157 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,157 @@
1+
"""
2+
Schema Definitions for Flare AI RAG Prompts

This module defines the core data structures and types used for managing prompts
and their responses in the Flare AI RAG system. It includes an enum for semantic
routing, typed dictionaries for router responses and prompt inputs, and a
dataclass for prompt templates.
8+
9+
The module provides type safety and structured data handling for AI prompt
10+
interactions, ensuring consistency in prompt formatting and response handling
11+
across the application.
12+
"""
13+
14+
from dataclasses import dataclass
15+
from enum import Enum
16+
from string import Template
17+
from typing import TypedDict
18+
19+
20+
class SemanticRouterResponse(str, Enum):
    """
    Possible routing targets produced by the semantic router.

    Each member names the handler that a user query should be dispatched
    to once the router has classified the input. Inheriting from ``str``
    lets members compare equal to their string values and serialize
    directly.

    Attributes:
        REQUEST_ATTESTATION: Handle a remote attestation request.
        CONVERSATIONAL: Produce a general conversational reply.
        RAG_ROUTER: Forward the query to the RAG pipeline router.
        RAG_RESPONDER: Forward the query to the RAG pipeline responder.
    """

    REQUEST_ATTESTATION = "RequestAttestation"
    CONVERSATIONAL = "Conversational"
    RAG_ROUTER = "RagRouter"
    RAG_RESPONDER = "RagResponder"
39+
40+
41+
class RAGRouterResponse(TypedDict):
    """
    Structured response emitted by the RAG router.

    Declares the single required key of a RAG routing result.

    Attributes:
        classification (str): Label the router assigned to the query.
    """

    classification: str
52+
53+
54+
class PromptInputs(TypedDict, total=False):
    """
    Optional keyword inputs accepted by prompt templates.

    ``total=False`` marks every key as optional, so callers provide only
    the fields a given template actually uses.

    Attributes:
        user_input (str): Raw text entered by the user.
        text (str): Processed or formatted text.
        content (str): General-purpose content string.
        code (str): Code snippet or code-related content.
    """

    user_input: str
    text: str
    content: str
    code: str
72+
73+
74+
@dataclass
75+
class Prompt:
76+
"""
77+
A dataclass representing an AI prompt template with its metadata
78+
and formatting logic.
79+
80+
This class encapsulates all information needed to define and use an AI prompt,
81+
including its template text, required inputs, response handling, and metadata.
82+
83+
Attributes:
84+
name (str): Unique identifier for the prompt
85+
description (str): Human-readable description of the prompt's purpose
86+
template (str): The prompt template text with optional placeholder variables
87+
required_inputs (list[str] | None): List of required input variable names
88+
response_schema (type | None): Expected response type/schema
89+
response_mime_type (str | None): MIME type of the expected response
90+
examples (list[dict[str, str]] | None): Example usages of the prompt
91+
category (str | None): Grouping category for the prompt
92+
version (str): Version string for the prompt template
93+
94+
Example:
95+
```python
96+
prompt = Prompt(
97+
name="token_send",
98+
description="Format a token send request",
99+
template="Send ${amount} tokens to ${address}",
100+
required_inputs=["amount", "address"],
101+
response_schema=TokenSendResponse,
102+
response_mime_type="application/json",
103+
)
104+
formatted = prompt.format(amount="100", address="0x123...")
105+
```
106+
"""
107+
108+
name: str
109+
description: str
110+
template: str
111+
required_inputs: list[str] | None
112+
response_schema: type | None
113+
response_mime_type: str | None
114+
examples: list[dict[str, str]] | None = None
115+
category: str | None = None
116+
version: str = "1.0"
117+
118+
def format(self, **kwargs: str | PromptInputs) -> str:
119+
"""
120+
Format the prompt template with provided input values.
121+
122+
This method uses string.Template to substitute variables in the prompt
123+
template with provided values. It validates that all required inputs
124+
are provided before formatting.
125+
126+
Args:
127+
**kwargs: Keyword arguments containing values for template variables.
128+
Can be strings or PromptInputs objects.
129+
130+
Returns:
131+
str: The formatted prompt string with all variables substituted.
132+
133+
Raises:
134+
ValueError: If any required inputs are missing from kwargs.
135+
KeyError: If template substitution fails due to missing keys.
136+
137+
Example:
138+
```python
139+
prompt = Prompt(
140+
template="Hello ${name}!",
141+
required_inputs=["name"],
142+
...
143+
)
144+
result = prompt.format(name="Alice")
145+
```
146+
"""
147+
if not self.required_inputs:
148+
return self.template
149+
150+
try:
151+
return Template(self.template).safe_substitute(**kwargs)
152+
except KeyError as e:
153+
missing_keys = set(self.required_inputs) - set(kwargs.keys())
154+
if missing_keys:
155+
msg = f"Missing required inputs: {missing_keys}"
156+
raise ValueError(msg) from e
157+
raise

0 commit comments

Comments
 (0)