Skip to content

Commit de88f4a

Browse files
Fix chat history last-k strategy to re-prepend the system message when it falls out of the window, and use the Roles enum instead of string literals in the function calling agent.
1 parent e346167 commit de88f4a

File tree

2 files changed

+11
-5
lines changed

2 files changed

+11
-5
lines changed

src/llama_cpp_agent/chat_history/basic_chat_history.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -112,9 +112,14 @@ def get_message_store(self) -> BasicChatMessageStore:
112112

113113
def get_chat_messages(self) -> List[Dict[str, str]]:
114114
if self.strategy == BasicChatHistoryStrategy.last_k_messages:
115-
messages = [self.message_store.get_message(0)]
116-
messages.extend(self.message_store.get_last_k_messages(self.k - 1))
117-
return convert_messages_to_list_of_dictionaries(messages)
115+
converted_messages = convert_messages_to_list_of_dictionaries(
116+
self.message_store.get_last_k_messages(self.k - 1)
117+
)
118+
if len(converted_messages) == self.k and converted_messages[0]["role"] != "system":
119+
messages = [convert_messages_to_list_of_dictionaries(self.message_store.get_message(0))]
120+
messages.extend(converted_messages[1:])
121+
return messages
122+
return converted_messages
118123
elif self.strategy == BasicChatHistoryStrategy.last_k_tokens:
119124
total_tokens = 0
120125
selected_messages = []

src/llama_cpp_agent/function_calling_agent.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from llama_cpp import Llama
88
from pydantic import BaseModel, Field
99

10+
from .chat_history.messages import Roles
1011
from .llm_output_settings import LlmStructuredOutputSettings, LlmStructuredOutputType
1112

1213
from .llm_agent import LlamaCppAgent, StreamingResponse
@@ -224,7 +225,7 @@ def generate_response(
224225
llm_sampling_settings: LlmSamplingSettings = None,
225226
structured_output_settings: LlmStructuredOutputSettings = None,
226227
):
227-
self.llama_cpp_agent.add_message(role="user", message=message)
228+
self.llama_cpp_agent.add_message(role=Roles.user, message=message)
228229

229230
result = self.intern_get_response(llm_sampling_settings=llm_sampling_settings)
230231

@@ -253,7 +254,7 @@ def generate_response(
253254
else:
254255
function_message += f"{count}. " + res + "\n\n"
255256
self.llama_cpp_agent.add_message(
256-
role="tool", message=function_message.strip()
257+
role=Roles.tool, message=function_message.strip()
257258
)
258259
if agent_sent_message:
259260
break

0 commit comments

Comments (0)