
Commit 1694ab1

perf: Resurrection Kael
1 parent: 473d750

File tree

3 files changed: +46 -12 lines changed

  pkg/httpd/chat.go
  pkg/httpd/message.go
  pkg/srvconn/conn_openai.go


pkg/httpd/chat.go (+31 -7)

@@ -43,7 +43,7 @@ func (h *chat) HandleMessage(msg *Message) {
 		conversation = &AIConversation{
 			Id:                   id,
 			Prompt:               msg.Prompt,
-			HistoryRecords:       make([]string, 0),
+			Context:              make([]QARecord, 0),
 			InterruptCurrentChat: false,
 		}

@@ -65,14 +65,16 @@ func (h *chat) HandleMessage(msg *Message) {
 		return
 	}

+	conversation.Question = msg.Data
+
 	openAIParam := &OpenAIParam{
 		AuthToken: h.termConf.GptApiKey,
 		BaseURL:   h.termConf.GptBaseUrl,
 		Proxy:     h.termConf.GptProxy,
 		Model:     h.termConf.GptModel,
 		Prompt:    conversation.Prompt,
 	}
-	conversation.HistoryRecords = append(conversation.HistoryRecords, msg.Data)
+	//conversation.HistoryRecords = append(conversation.HistoryRecords, msg.Data)
 	go h.chat(openAIParam, conversation)
 }

@@ -90,30 +92,44 @@ func (h *chat) chat(
 		chatGPTParam.Proxy,
 	)

-	startIndex := len(conversation.HistoryRecords) - 15
+	startIndex := len(conversation.Context) - 8
 	if startIndex < 0 {
 		startIndex = 0
 	}
-	contents := conversation.HistoryRecords[startIndex:]
+	conversation.Context = conversation.Context[startIndex:]
+	context := conversation.Context
+
+	chatContext := make([]openai.ChatCompletionMessage, 0)
+	for _, record := range context {
+		chatContext = append(chatContext, openai.ChatCompletionMessage{
+			Role:    openai.ChatMessageRoleUser,
+			Content: record.Question,
+		})
+		chatContext = append(chatContext, openai.ChatCompletionMessage{
+			Role:    openai.ChatMessageRoleAssistant,
+			Content: record.Answer,
+		})
+	}

 	openAIConn := &srvconn.OpenAIConn{
 		Id:          conversation.Id,
 		Client:      c,
 		Prompt:      chatGPTParam.Prompt,
 		Model:       chatGPTParam.Model,
-		Contents:    contents,
+		Question:    conversation.Question,
+		Context:     chatContext,
 		IsReasoning: false,
 		AnswerCh:    answerCh,
 		DoneCh:      doneCh,
 		Type:        h.termConf.ChatAIType,
 	}

 	go openAIConn.Chat(&conversation.InterruptCurrentChat)
-	return h.processChatMessages(openAIConn)
+	return h.processChatMessages(openAIConn, conversation)
 }

 func (h *chat) processChatMessages(
-	openAIConn *srvconn.OpenAIConn,
+	openAIConn *srvconn.OpenAIConn, conversation *AIConversation,
 ) string {
 	messageID := common.UUID()
 	id := openAIConn.Id

@@ -123,6 +139,14 @@ func (h *chat) processChatMessages(
 			h.sendSessionMessage(id, answer, messageID, "message", openAIConn.IsReasoning)
 		case answer := <-openAIConn.DoneCh:
 			h.sendSessionMessage(id, answer, messageID, "finish", false)
+			runes := []rune(answer)
+			if len(runes) > 100 {
+				answer = string(runes[:100])
+			}
+			conversation.Context = append(conversation.Context, QARecord{
+				Question: conversation.Question,
+				Answer:   answer,
+			})
 			return answer
 		}
 	}
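Note: the following is a minimal, self-contained sketch of the context handling this file now performs: keep only the most recent 8 question/answer pairs and expand them into alternating user/assistant messages. The helper name buildChatContext and the main function are illustrative only; QARecord and the role constants follow the diff and the go-openai library.

package main

import (
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

// QARecord mirrors the struct added in pkg/httpd/message.go.
type QARecord struct {
	Question string
	Answer   string
}

// buildChatContext is an illustrative helper (not part of the commit): trim
// the history to the last 8 Q/A pairs and expand each pair into a user
// message followed by an assistant message, as h.chat now does inline.
func buildChatContext(records []QARecord) []openai.ChatCompletionMessage {
	start := len(records) - 8
	if start < 0 {
		start = 0
	}
	window := records[start:]
	msgs := make([]openai.ChatCompletionMessage, 0, 2*len(window))
	for _, r := range window {
		msgs = append(msgs,
			openai.ChatCompletionMessage{Role: openai.ChatMessageRoleUser, Content: r.Question},
			openai.ChatCompletionMessage{Role: openai.ChatMessageRoleAssistant, Content: r.Answer},
		)
	}
	return msgs
}

func main() {
	history := []QARecord{
		{Question: "list files", Answer: "use ls -l"},
		{Question: "check disk usage", Answer: "use df -h"},
	}
	// Two Q/A pairs expand into four chat messages.
	fmt.Println(len(buildChatContext(history)))
}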

pkg/httpd/message.go (+7 -1)

@@ -163,10 +163,16 @@ type OpenAIParam struct {
 	Type      string
 }

+type QARecord struct {
+	Question string
+	Answer   string
+}
+
 type AIConversation struct {
 	Id                   string
 	Prompt               string
-	HistoryRecords       []string
+	Question             string
+	Context              []QARecord
 	InterruptCurrentChat bool
 }
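As a usage note, the new fields work together roughly as in the sketch below, which assumes the QARecord and AIConversation types above; the helper name appendRecord is hypothetical. After a reply finishes, the answer is cut to its first 100 runes and stored next to the question for the next request's context, matching the logic added in pkg/httpd/chat.go.

// appendRecord is a hypothetical helper: truncate the finished answer to
// 100 runes and fold the Q/A pair back into the conversation context.
func appendRecord(c *AIConversation, question, answer string) {
	runes := []rune(answer)
	if len(runes) > 100 {
		answer = string(runes[:100])
	}
	c.Context = append(c.Context, QARecord{Question: question, Answer: answer})
}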

pkg/srvconn/conn_openai.go (+8 -4)

@@ -93,7 +93,8 @@ type OpenAIConn struct {
 	Client      *openai.Client
 	Model       string
 	Prompt      string
-	Contents    []string
+	Question    string
+	Context     []openai.ChatCompletionMessage
 	IsReasoning bool
 	AnswerCh    chan string
 	DoneCh      chan string

@@ -102,11 +103,10 @@ type OpenAIConn struct {

 func (conn *OpenAIConn) Chat(interruptCurrentChat *bool) {
 	ctx := context.Background()
-	var messages []openai.ChatCompletionMessage

-	messages = append(messages, openai.ChatCompletionMessage{
+	messages := append(conn.Context, openai.ChatCompletionMessage{
 		Role:    openai.ChatMessageRoleUser,
-		Content: strings.Join(conn.Contents, "\n"),
+		Content: conn.Question,
 	})

 	systemPrompt := conn.Prompt

@@ -182,6 +182,10 @@ func (conn *OpenAIConn) Chat(interruptCurrentChat *bool) {
 			newContent = response.Choices[0].Delta.Content
 		}

+		if newContent == "" {
+			continue
+		}
+
 		content += newContent
 		conn.AnswerCh <- content
 	}
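Below is a hedged sketch of how the reworked Chat could assemble its request. Where exactly the system prompt is attached is outside this hunk (only systemPrompt := conn.Prompt is visible), so its placement here is an assumption, and the helper name buildRequestMessages is illustrative. The diff's messages := append(conn.Context, ...) may reuse conn.Context's backing array; copying into a fresh slice, as below, avoids that aliasing.

package main

import (
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

// buildRequestMessages is an illustrative helper (not part of the commit):
// combine the rolling context with the current question into one message
// slice. Prepending the system prompt here is an assumption.
func buildRequestMessages(systemPrompt, question string, ctxMsgs []openai.ChatCompletionMessage) []openai.ChatCompletionMessage {
	msgs := make([]openai.ChatCompletionMessage, 0, len(ctxMsgs)+2)
	msgs = append(msgs, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleSystem,
		Content: systemPrompt,
	})
	msgs = append(msgs, ctxMsgs...) // copied, so the caller's slice is not aliased
	msgs = append(msgs, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleUser,
		Content: question,
	})
	return msgs
}

func main() {
	ctx := []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleUser, Content: "hello"},
		{Role: openai.ChatMessageRoleAssistant, Content: "hi"},
	}
	msgs := buildRequestMessages("You are Kael.", "what next?", ctx)
	// 4 messages: system + prior pair + current question.
	fmt.Println(len(msgs))
}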
