
Commit e7e21ea

Commit message: revision
Signed-off-by: xtzhang1122 <[email protected]>
1 parent: 439b872

1 file changed: +26 −56 lines

Diff for: src/core/routes/test.py

@@ -34,83 +34,53 @@ class RequestConversation(BaseModel):
     }
 
 
-def get_hyperledger_fabric_answer(question):
-    return responses.get(question, "Question not found in the database.")
-
-
 def normalize_question(question: str) -> str:
     # Convert to lowercase and strip punctuation
     question = question.rstrip()
     return re.sub(r'[^\w\s]', '', question.lower())
 
 
-async def conversation_stream(offset: int = 0, limit: int = 30, order: str = "updated") -> AsyncGenerator[ResponseConversation, None]:
-    # Normalize the keys in the responses dictionary
-    normalized_responses = {normalize_question(k): v for k, v in responses.items()}
-
-    # Retrieve items based on offset and limit
-    items = list(normalized_responses.items())[offset:offset + limit]
-
-    for idx, (_, answer) in enumerate(items):
-        conversation = ResponseConversation(
+def create_conversation_response(content: str) -> ResponseConversation:
+    return ResponseConversation(
+        id=str(uuid.uuid4()),
+        message=ResponseMessage(
+            content=content,
+            type=1,
             id=str(uuid.uuid4()),
-            message=ResponseMessage(
-                content=answer,
-                type=1,
-                id=str(uuid.uuid4()),
-            )
         )
-        yield f"data: {conversation.json()}\n\n"
-        await asyncio.sleep(0.1)  # Simulate processing time
+    )
 
 
-@router.post("/conversations")
+@router.get("/conversations", response_model=List[ResponseConversation])
 def get_conversations(
     offset: int = 0, limit: int = 30, order: str = "updated"
 ) -> List[ResponseConversation]:
     normalized_responses = {normalize_question(k): v for k, v in responses.items()}
     items = list(normalized_responses.items())[offset:offset + limit]
-    conversations = [
-        ResponseConversation(
-            id=str(uuid.uuid4()),
-            message=ResponseMessage(
-                content=answer,
-                type=1,
-                id=str(uuid.uuid4()),
-            )
-        ) for _, answer in items
-    ]
-    return conversations
 
+    return [create_conversation_response(answer) for _, answer in items]
 
-async def single_conversation_stream(question: str) -> AsyncGenerator[ResponseConversation, None]:
-    question = normalize_question(question)
+
+@router.get("/conversation/{id}", response_model=ResponseConversation)
+def get_single_conversation(id: str) -> ResponseConversation:
+    question = normalize_question(id)
     answer = responses.get(question, "Question not found")
 
-    conversation = ResponseConversation(
-        id=str(uuid.uuid4()),
-        message=ResponseMessage(
-            content=answer,
-            type=1,
-            id=str(uuid.uuid4()),
-        )
-    )
-    yield f"data: {conversation.json()}\n\n"
-    await asyncio.sleep(0.1)  # Simulate processing time
+    return create_conversation_response(answer)
 
 
-@router.post("/conversation/{id}")
-def post_conversation(id: str):
-    return StreamingResponse(single_conversation_stream(id), media_type="application/json")
+async def single_conversation_stream(question: str) -> AsyncGenerator[str, None]:
+    question = normalize_question(question)
+    answer = responses.get(question, "Question not found")
 
+    conversation = create_conversation_response(answer)
+    yield f"data: {conversation.json()}\n\n"
+    await asyncio.sleep(0.1)
 
-@router.post("/conversation", response_model=ResponseConversation)
-def post_conversation(item: RequestConversation) -> ResponseConversation:
-    return ResponseConversation(
-        id=item.id,
-        message=ResponseMessage(
-            content=get_hyperledger_fabric_answer(item.content),
-            type=1,
-            id=str(uuid.uuid4()),
-        ),
+
+@router.post("/conversation")
+async def post_conversation(item: RequestConversation) -> StreamingResponse:
+    return StreamingResponse(
+        single_conversation_stream(item.content),
+        media_type="text/event-stream"
     )
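
For reference, a minimal usage sketch (not part of the commit) of how the refactored endpoints might be exercised with FastAPI's TestClient. It assumes the router above is mounted on an app importable from a hypothetical src.core.main module, and that RequestConversation accepts "id" and "content" fields; those names and the example question are assumptions, not confirmed by the diff.

    # Minimal sketch; app module path and RequestConversation fields are assumed.
    from fastapi.testclient import TestClient

    from src.core.main import app  # hypothetical application module

    client = TestClient(app)

    # GET /conversations returns a JSON list of ResponseConversation objects.
    resp = client.get("/conversations", params={"offset": 0, "limit": 5})
    print(resp.status_code, resp.json())

    # GET /conversation/{id} normalizes the path segment and returns one answer.
    resp = client.get("/conversation/example question")
    print(resp.json())

    # POST /conversation streams the answer as server-sent events; TestClient
    # collects the streamed body, so resp.text holds the "data: ..." frames.
    resp = client.post(
        "/conversation",
        json={"id": "1", "content": "example question"},  # assumed payload shape
    )
    print(resp.headers["content-type"], resp.text)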
