@@ -34,83 +34,53 @@ class RequestConversation(BaseModel):
34
34
}
35
35
36
36
37
- def get_hyperledger_fabric_answer (question ):
38
- return responses .get (question , "Question not found in the database." )
39
-
40
-
41
37
def normalize_question(question: str) -> str:
    """Canonicalize a question for dictionary lookup.

    Lowercases the text, trims surrounding whitespace, and removes all
    punctuation so that e.g. "  What is Fabric? " and "what is fabric"
    map to the same key.

    Bug fix: the original used ``rstrip()``, which only removes trailing
    whitespace — leading whitespace survived the punctuation regex
    (``\\s`` is kept) and caused lookup misses against the responses dict.
    ``strip()`` trims both ends.
    """
    question = question.strip()
    # Drop every character that is neither a word character nor whitespace.
    return re.sub(r'[^\w\s]', '', question.lower())
45
41
46
42
47
def create_conversation_response(content: str) -> ResponseConversation:
    """Wrap *content* in a freshly-identified ResponseConversation.

    The conversation and its inner message each receive an independent
    uuid4 id; the message type is always 1.
    """
    message = ResponseMessage(
        content=content,
        type=1,
        id=str(uuid.uuid4()),
    )
    return ResponseConversation(id=str(uuid.uuid4()), message=message)
65
52
66
53
67
@router.get("/conversations", response_model=List[ResponseConversation])
def get_conversations(
    offset: int = 0, limit: int = 30, order: str = "updated"
) -> List[ResponseConversation]:
    """Return one page of conversations built from the canned responses.

    NOTE(review): ``order`` is accepted but never applied — confirm
    whether sorting was intended before the slice.
    """
    # Normalize the question keys so pagination sees canonical entries.
    normalized = {normalize_question(q): a for q, a in responses.items()}
    page = list(normalized.items())[offset:offset + limit]
    return [create_conversation_response(answer) for _, answer in page]
85
62
86
- async def single_conversation_stream (question : str ) -> AsyncGenerator [ResponseConversation , None ]:
87
- question = normalize_question (question )
63
+
64
@router.get("/conversation/{id}", response_model=ResponseConversation)
def get_single_conversation(id: str) -> ResponseConversation:
    """Answer a single question; the path parameter is the question text.

    Falls back to a fixed not-found message when the normalized question
    has no entry in the responses dict.
    """
    key = normalize_question(id)
    answer = responses.get(key, "Question not found")
    return create_conversation_response(answer)
100
70
101
71
102
- @ router . post ( "/conversation/{id}" )
103
- def post_conversation ( id : str ):
104
- return StreamingResponse ( single_conversation_stream ( id ), media_type = "application/json " )
72
async def single_conversation_stream(question: str) -> AsyncGenerator[str, None]:
    """Yield one server-sent-event frame answering *question*.

    Produces a single ``data: ...`` SSE payload, then sleeps briefly to
    simulate processing time.
    """
    key = normalize_question(question)
    reply = responses.get(key, "Question not found")
    payload = create_conversation_response(reply)
    yield f"data: {payload.json()}\n\n"
    await asyncio.sleep(0.1)
106
79
107
- @router .post ("/conversation" , response_model = ResponseConversation )
108
- def post_conversation (item : RequestConversation ) -> ResponseConversation :
109
- return ResponseConversation (
110
- id = item .id ,
111
- message = ResponseMessage (
112
- content = get_hyperledger_fabric_answer (item .content ),
113
- type = 1 ,
114
- id = str (uuid .uuid4 ()),
115
- ),
80
+
81
@router.post("/conversation")
async def post_conversation(item: RequestConversation) -> StreamingResponse:
    """Stream the canned answer to ``item.content`` as server-sent events."""
    stream = single_conversation_stream(item.content)
    return StreamingResponse(stream, media_type="text/event-stream")
0 commit comments