File tree — 2 files changed: +11 −5 lines changed
lines changed Original file line number Diff line number Diff line change @@ -112,9 +112,14 @@ def get_message_store(self) -> BasicChatMessageStore:
112112
113113 def get_chat_messages (self ) -> List [Dict [str , str ]]:
114114 if self .strategy == BasicChatHistoryStrategy .last_k_messages :
115- messages = [self .message_store .get_message (0 )]
116- messages .extend (self .message_store .get_last_k_messages (self .k - 1 ))
117- return convert_messages_to_list_of_dictionaries (messages )
115+ converted_messages = convert_messages_to_list_of_dictionaries (
116+ self .message_store .get_last_k_messages (self .k - 1 )
117+ )
118+ if len (converted_messages ) == self .k and converted_messages [0 ]["role" ] != "system" :
119+ messages = [convert_messages_to_list_of_dictionaries (self .message_store .get_message (0 ))]
120+ messages .extend (converted_messages [1 :])
121+ return messages
122+ return converted_messages
118123 elif self .strategy == BasicChatHistoryStrategy .last_k_tokens :
119124 total_tokens = 0
120125 selected_messages = []
File 2 — @@ -7,6 +7,7 @@

 from llama_cpp import Llama
 from pydantic import BaseModel, Field

+from .chat_history.messages import Roles
 from .llm_output_settings import LlmStructuredOutputSettings, LlmStructuredOutputType

 from .llm_agent import LlamaCppAgent, StreamingResponse

@@ -224,7 +225,7 @@ (context: def generate_response()

         llm_sampling_settings: LlmSamplingSettings = None,
         structured_output_settings: LlmStructuredOutputSettings = None,
     ):
-        self.llama_cpp_agent.add_message(role="user", message=message)
+        self.llama_cpp_agent.add_message(role=Roles.user, message=message)

         result = self.intern_get_response(llm_sampling_settings=llm_sampling_settings)

@@ -253,7 +254,7 @@

         else:
             function_message += f"{count}. " + res + "\n\n"
         self.llama_cpp_agent.add_message(
-            role="tool", message=function_message.strip()
+            role=Roles.tool, message=function_message.strip()
         )
         if agent_sent_message:
             break
You can’t perform that action at this time.
0 commit comments