Commit 3f8b49c

Add comprehensive JSON prompt use cases and integrations
This update significantly enhances the JSON-based interaction capabilities and ensures robust testing and validation for seamless integration.

- **Fast_API Routes**:
  - Introduced `Routes__Json_Prompt` with endpoints for:
    - Summary creation
    - Document improvement
    - Summary validation
    - Email generation
    - Form generation
    - Graph extraction
    - Mermaid diagram generation
- **Prompt to JSON Framework**:
  - Extended `Prompt_To_Json__Open_AI` to support structured responses with enhanced metadata (e.g., duration, temperature, seed, timestamp).
  - Replaced `model` with `llm_model` for clarity.
  - Updated response parsing to include detailed metrics and response schemas.
- **Use Cases**:
  - Implemented specialized classes for JSON prompt scenarios:
    - `Json_Prompt__Create_Summary`
    - `Json_Prompt__Document_Improver`
    - `Json_Prompt__Email_Generator`
    - `Json_Prompt__Form_Generator`
    - `Json_Prompt__Graph_Extractor`
    - `Json_Prompt__Mermaid_Generator`
    - `Json_Prompt__Validate_Summary`
- **Integration Tests**:
  - Added integration tests for all new use cases, with validation against expected behavior and edge cases.
- **Refactors and Cleanup**:
  - Refactored legacy `model` references in test cases and core logic.
  - Improved formatting.
  - Added TODO comments for future improvements (e.g., replacing the OpenAI client with a custom requests-based implementation for performance).
1 parent 10cda8c commit 3f8b49c

19 files changed (+805 −64 lines changed)
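To give a sense of how the new routes are consumed, here is a minimal client-side sketch. The URL is an assumption inferred from the `/llms` base path, the `json-prompt` tag, and the `create_summary` route name in this commit; the host, port, and exact path segment (dash vs. underscore) depend on how osbot_fast_api derives paths from route methods and are not confirmed by the diff.

```python
import requests

# Hypothetical call to the new summary endpoint; the path is inferred from
# base_path='/llms', tag='json-prompt' and the create_summary route name,
# and may differ (e.g. 'create_summary' vs 'create-summary') in practice.
url     = "http://localhost:8000/llms/json-prompt/create-summary"
payload = {"target_text": "OSBot-LLMs adds JSON-prompt endpoints that return structured responses."}

result = requests.post(url, json=payload).json()
print(result)   # expected: a status envelope carrying json_prompt_id, duration, llm_model, tokens, ...
```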

osbot_llms/fast_api/Fast_API_Route.py

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 
 from osbot_utils.base_classes.Kwargs_To_Self import Kwargs_To_Self
 
-
+# todo: legacy replace with the new Fast_API_Routes
 class Fast_API__Routes(Kwargs_To_Self):
     router      : APIRouter
     path_prefix : str

osbot_llms/fast_api/Fast_API__LLMs.py

Lines changed: 4 additions & 2 deletions
@@ -2,14 +2,16 @@
 from osbot_fast_api.api.Fast_API import Fast_API
 from osbot_llms.fast_api.routes.Routes__Chat import Routes__Chat
 from osbot_llms.fast_api.routes.Routes__Info import Routes__Info
+from osbot_llms.fast_api.routes.Routes__Json_Prompt import Routes__Json_Prompt
 
 
 class Fast_API__LLMs(Fast_API):
     base_path  : str = '/llms'
     enable_cors: bool = True
 
     def setup_routes(self):
-        self.add_routes(Routes__Chat)
-        self.add_routes(Routes__Info)
+        self.add_routes(Routes__Chat       )
+        self.add_routes(Routes__Info       )
+        self.add_routes(Routes__Json_Prompt)
 
 
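For reference, wiring these routes into a running application might look like the sketch below. The `setup()` and `app()` helpers are assumptions about the osbot_fast_api `Fast_API` base class (setup presumably invokes `setup_routes()`, and `app()` presumably returns the underlying FastAPI instance); they are not shown in this diff and may be named differently.

```python
from osbot_llms.fast_api.Fast_API__LLMs import Fast_API__LLMs

# Assumption: Fast_API exposes setup() and app(); adjust to the actual
# osbot_fast_api API if the helper names differ.
fast_api_llms = Fast_API__LLMs()
fast_api_llms.setup()
app = fast_api_llms.app()     # serve with e.g.: uvicorn my_module:app
```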

osbot_llms/fast_api/routes/Routes__Json_Prompt.py

Lines changed: 122 additions & 0 deletions
New file (all lines added):

from pydantic import BaseModel
from osbot_fast_api.api.Fast_API_Routes import Fast_API_Routes
from typing import Optional, Dict, TypeVar, Type
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Create_Summary    import Model__Response_Format__Json_Prompt__Create_Summary   , Json_Prompt__Create_Summary
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Document_Improver import Model__Response_Format__Json_Prompt__Document_Improver, Json_Prompt__Document_Improver
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Validate_Summary  import Model__Response_Format__Json_Prompt__Validate_Summary , Json_Prompt__Validate_Summary
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Email_Generator   import Model__Response_Format__Json_Prompt__Email_Generator  , Json_Prompt__Email_Generator
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Form_Generator    import Model__Response_Format__Json_Prompt__Form_Generator   , Json_Prompt__Form_Generator
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Graph_Extractor   import Model__Response_Format__Json_Prompt__Graph_Extractor  , Json_Prompt__Graph_Extractor, GraphResponse
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Mermaid_Generator import Model__Response_Format__Json_Prompt__Mermaid_Generator, Json_Prompt__Mermaid_Generator
from osbot_utils.helpers.Random_Guid import Random_Guid
from osbot_utils.utils.Status import status_ok, status_error


class Model__Fast_API__Create_Summary(BaseModel):
    target_text: str

class Model__Fast_API__Document_Improver(BaseModel):
    current_content    : str
    improvement_request: str

class Model__Fast_API__Validate_Summary(BaseModel):
    original_text: str
    summary      : str

class Model__Fast_API__Email_Generator(BaseModel):
    email_requirements: str
    context           : Optional[Dict] = None

class Model__Fast_API__Form_Generator(BaseModel):
    form_requirements: str

class Model__Fast_API__Graph_Extractor(BaseModel):
    content: str

class Model__Fast_API__Mermaid_Generator(BaseModel):
    graph        : GraphResponse
    style_request: Optional[str] = ""


class Routes__Json_Prompt(Fast_API_Routes):
    tag : str = 'json-prompt'

    def invoke_prompt(self, target_class: Type, target_method: str, method_kwargs) -> Dict:
        try:
            response = getattr(target_class(), target_method)(**method_kwargs)

            data = { 'json_prompt_id' : Random_Guid()                  ,
                     'duration'       : response.get('duration'       ),
                     'llm_model'      : response.get('llm_model'      ),
                     'response_json'  : response.get('response_json'  ),
                     'response_schema': response.get('response_schema'),
                     'seed'           : response.get('seed'           ),
                     'temperature'    : response.get('temperature'    ),
                     'timestamp'      : response.get('timestamp'      ),
                     'tokens'         : response.get('tokens'         ),
                   }
            return status_ok(message="Json Prompt created ok", data=data)
        except Exception as exception:
            return status_error(message="Failed to create Json Prompt", error=f'{exception}')

    def create_summary(self, request: Model__Fast_API__Create_Summary):
        target_class  = Json_Prompt__Create_Summary
        target_method = 'create_summary'
        target_kwargs = dict(target_text=request.target_text)
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def improve_document(self, request: Model__Fast_API__Document_Improver):
        target_class  = Json_Prompt__Document_Improver
        target_method = 'improve_document'
        target_kwargs = dict(current_content     = request.current_content    ,
                             improvement_request = request.improvement_request)
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def validate_summary(self, request: Model__Fast_API__Validate_Summary):
        target_class  = Json_Prompt__Validate_Summary
        target_method = 'validate_summary'
        target_kwargs = dict(original_text = request.original_text,
                             summary       = request.summary      )
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def generate_email(self, request: Model__Fast_API__Email_Generator):
        target_class  = Json_Prompt__Email_Generator
        target_method = 'generate_email'
        target_kwargs = dict(email_requirements = request.email_requirements,
                             context            = request.context           )
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def generate_form(self, request: Model__Fast_API__Form_Generator):
        target_class  = Json_Prompt__Form_Generator
        target_method = 'generate_form'
        target_kwargs = dict(form_requirements = request.form_requirements)
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def extract_graph(self, request: Model__Fast_API__Graph_Extractor):
        target_class  = Json_Prompt__Graph_Extractor
        target_method = 'extract_graph'
        target_kwargs = dict(content = request.content)
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def generate_mermaid(self, request: Model__Fast_API__Mermaid_Generator):
        target_class  = Json_Prompt__Mermaid_Generator
        target_method = 'convert_to_mermaid'
        target_kwargs = dict(graph         = request.graph        ,
                             style_request = request.style_request)
        result        = self.invoke_prompt(target_class, target_method, target_kwargs)
        return result

    def setup_routes(self):
        self.add_route_post(self.create_summary  )
        self.add_route_post(self.improve_document)
        self.add_route_post(self.validate_summary)
        self.add_route_post(self.generate_email  )
        self.add_route_post(self.generate_form   )
        self.add_route_post(self.extract_graph   )
        self.add_route_post(self.generate_mermaid)
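Every route above funnels its result through `status_ok`/`status_error` from osbot_utils. The exact envelope those helpers produce is not part of this diff, so the sketch below is illustrative only; all field values are placeholders and the top-level keys are assumptions about `osbot_utils.utils.Status`.

```python
# Illustrative only: placeholder values, and the envelope layout is assumed
# from osbot_utils.utils.Status, which is not included in this diff.
success_example = {
    "status" : "ok",
    "message": "Json Prompt created ok",
    "data"   : {"json_prompt_id" : "a-random-guid",
                "duration"       : 1.42,
                "llm_model"      : "gpt-4o-mini",
                "response_json"  : {"...": "structured output matching the response schema"},
                "response_schema": "Model__Response_Format__Json_Prompt__Create_Summary",
                "seed"           : 42,
                "temperature"    : 0.0,
                "timestamp"      : 1732475631000,
                "tokens"         : 512}}

error_example = {
    "status" : "error",
    "message": "Failed to create Json Prompt",
    "error"  : "description of the raised exception"}
```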
osbot_llms/llms/prompt_to_json/Prompt_To_Json__Open_AI.py

Lines changed: 26 additions & 34 deletions
@@ -1,18 +1,17 @@
 from typing import List, Dict
-
-import openai
-from pydantic import BaseModel
 from pydantic._internal._model_construction import ModelMetaclass
 
 from osbot_llms.llms.API_Open_AI import API_Open_AI
 from osbot_utils.base_classes.Type_Safe import Type_Safe
+from osbot_utils.context_managers.capture_duration import capture_duration
+from osbot_utils.helpers.Timestamp_Now import Timestamp_Now
 from osbot_utils.utils.Json import str_to_json
 
 
 class Prompt_To_Json__Open_AI(Type_Safe):
     response_format : ModelMetaclass
     messages        : List[Dict[str, str]]
-    model           : str
+    llm_model       : str
     temperature     : float
     seed            : int
 
@@ -30,37 +29,24 @@ def add_message(self,role, content):
         return self
 
     def invoke(self):
-        response        = self.invoke__raw()
-        response_parsed = self.parse_response(response)
+        with capture_duration() as duration:
+            response    = self.invoke__raw()
+        response_parsed = self.parse_response(response, duration.seconds)
         return response_parsed
 
     def invoke__raw(self):
-        client = API_Open_AI().client()
-
-        try:
-            completion = client.beta.chat.completions.parse(**self.invoke_kwargs())
-            return completion
-        except Exception as exception:              # todo: figure out the exceptions to handle here
-            raise exception
-            # # Handle edge cases
-            # if type(e) == openai.LengthFinishReasonError:
-            #     # Retry with a higher max tokens
-            #     print("Too many tokens: ", e)
-            #     pass
-            # else:
-            #     # Handle other exceptions
-            #     print(e)
-            #     pass
+        client = API_Open_AI().client()             # todo replace this with requests api (and see the performance implications of creating this object all the time, for example see if there are advantages in caching the requests session)
+        return client.beta.chat.completions.parse(**self.invoke_kwargs())
 
     def invoke_kwargs(self):
-        return dict(model           = self.model          ,
+        return dict(model           = self.llm_model      ,
                     messages        = self.messages       ,
                     response_format = self.response_format,
                     seed            = self.seed           ,
                     temperature     = self.temperature    )
 
     def set_model(self, model):
-        self.model = model
+        self.llm_model = model
         return self
 
     def set_model__gpt_4o(self):
@@ -73,13 +59,19 @@ def set_response_format(self, response_format):
         self.response_format = response_format
         return self
 
-    def parse_response(self, response):
-        choice  = response.choices[0]
-        message = choice.message
-        usage   = response.usage
-        content = str_to_json(message.content)
-        model   = message.parsed
-        tokens  = usage.total_tokens
-        return dict(content = content,
-                    model   = model  ,
-                    tokens  = tokens )
+    def parse_response(self, response, duration):
+        choice          = response.choices[0]
+        message         = choice.message
+        usage           = response.usage
+        response_json   = str_to_json(message.content)
+        response_parsed = message.parsed
+        tokens          = usage.total_tokens
+        return dict(response_json   = response_json                 ,
+                    response_parsed = response_parsed               ,
+                    duration        = duration                      ,
+                    llm_model       = self.llm_model                ,
+                    response_schema = self.response_format.__name__ ,
+                    seed            = self.seed                     ,
+                    temperature     = self.temperature              ,
+                    timestamp       = Timestamp_Now()               ,
+                    tokens          = tokens                        )
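As a usage sketch, the reworked class can be driven directly with a small pydantic schema. `Capital_Response` is a made-up response format used only for this example; `set_model__gpt_4o_mini`, `add_message__system`, and `add_message__user` are helpers referenced elsewhere in this commit, and valid OpenAI credentials are assumed to be available to `API_Open_AI`.

```python
from pydantic import BaseModel
from osbot_llms.llms.prompt_to_json.Prompt_To_Json__Open_AI import Prompt_To_Json__Open_AI

class Capital_Response(BaseModel):           # hypothetical response format, only for this sketch
    country: str
    capital: str

prompt = Prompt_To_Json__Open_AI()
prompt.set_model__gpt_4o_mini()              # helper used by the use-case classes in this commit
prompt.set_response_format(Capital_Response)
prompt.add_message__system("Answer using the structured format only")
prompt.add_message__user("What is the capital of France?")

result = prompt.invoke()                     # dict produced by the new parse_response()
print(result['response_json'])               # e.g. {'country': 'France', 'capital': 'Paris'}
print(result['duration'], result['llm_model'], result['tokens'])
```

The extra keys (duration, seed, temperature, timestamp, response_schema) are what the new Fast_API routes forward verbatim to callers.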

osbot_llms/llms/prompt_to_json/use_cases/Json_Prompt__Create_Summary.py

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,6 @@
-from pydantic import BaseModel
-
+from pydantic                                                import BaseModel
 from osbot_llms.llms.prompt_to_json.Prompt_To_Json__Open_AI import Prompt_To_Json__Open_AI
-from osbot_utils.base_classes.Type_Safe import Type_Safe
+from osbot_utils.base_classes.Type_Safe                      import Type_Safe
 
 
 class Model__Response_Format__Json_Prompt__Create_Summary(BaseModel):
osbot_llms/llms/prompt_to_json/use_cases/Json_Prompt__Document_Improver.py

Lines changed: 65 additions & 0 deletions
New file (all lines added):

from pydantic import BaseModel
from typing import List, Literal
from osbot_llms.llms.prompt_to_json.Prompt_To_Json__Open_AI import Prompt_To_Json__Open_AI
from osbot_utils.base_classes.Type_Safe import Type_Safe

class DocumentChange(BaseModel):
    type    : Literal['addition', 'deletion', 'addition', 'formatting']
    original: str
    updated : str
    reason  : str

class DocumentResponse(BaseModel):
    new_version: str
    changes    : List[DocumentChange]
    summary    : str

class Model__Response_Format__Json_Prompt__Document_Improver(BaseModel):
    document: DocumentResponse
    status  : Literal['success', 'error', 'partial_success', 'no_changes_needed', 'validation_failed']


class Json_Prompt__Document_Improver(Type_Safe):
    prompt_to_json : Prompt_To_Json__Open_AI
    response_format: type = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.response_format = Model__Response_Format__Json_Prompt__Document_Improver

    def improve_document(self, current_content: str, improvement_request: str):
        with self.prompt_to_json as _:
            _.set_model__gpt_4o_mini()
            _.set_response_format(self.response_format)
            _.add_message__system(self.system_prompt())
            _.add_message__user(f"""\

------------- Current document content: -------------
{current_content}
------------------------------------------------------

------------- Improvement request: -------------

{improvement_request}

------------------------------------------------------""")
            return _.invoke()

    def system_prompt(self):
        return """You are a document improvement assistant. Follow these rules exactly:

1. Preserve document structure and formatting
2. Maintain key information integrity
3. Keep consistent writing style
5. Only make requested improvements
6. Provide clear reasoning for changes

For each change:
- Include original and modified text
- Explain modification rationale

Return a structured response with:
- Complete updated document
- Detailed change log
- Change summary
- Status indicator"""
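A minimal sketch of calling this use case directly, assuming OpenAI credentials are configured for `API_Open_AI`; the sample text is hypothetical, and the dictionary keys under `response_json` are assumed to mirror `Model__Response_Format__Json_Prompt__Document_Improver` as serialized by the new `parse_response()`.

```python
from osbot_llms.llms.prompt_to_json.use_cases.Json_Prompt__Document_Improver import Json_Prompt__Document_Improver

improver = Json_Prompt__Document_Improver()
result   = improver.improve_document(current_content     = "Ths draft has a typo and one very long run-on sentence that never stops.",
                                     improvement_request = "Fix the typo and split the run-on sentence.")

document = result['response_json']['document']     # keys follow Model__Response_Format__Json_Prompt__Document_Improver
print(document['new_version'])
for change in document['changes']:
    print(change['type'], '-', change['reason'])
print(result['response_json']['status'])           # e.g. 'success' or 'no_changes_needed'
```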
