Skip to content

Commit 4388f6b

Browse files
authored
[bug-fix] fix issue related to bot memory when using multiple bots at the same time (#486)
1 parent d0956a0 commit 4388f6b

File tree

1 file changed

+5
-7
lines changed

1 file changed

+5
-7
lines changed

embedchain/embedchain.py

+5-7
--- a/embedchain/embedchain.py
+++ b/embedchain/embedchain.py
@@ -26,8 +26,6 @@
 ABS_PATH = os.getcwd()
 DB_DIR = os.path.join(ABS_PATH, "db")
 
-memory = ConversationBufferMemory()
-
 
 class EmbedChain:
     def __init__(self, config: BaseAppConfig):
@@ -44,6 +42,7 @@ def __init__(self, config: BaseAppConfig):
         self.user_asks = []
         self.is_docs_site_instance = False
         self.online = False
+        self.memory = ConversationBufferMemory()
 
         # Send anonymous telemetry
         self.s_id = self.config.id if self.config.id else str(uuid.uuid4())
@@ -362,8 +361,7 @@ def chat(self, input_query, config: ChatConfig = None, dry_run=False):
             k["web_search_result"] = self.access_search_and_get_results(input_query)
         contexts = self.retrieve_from_database(input_query, config)
 
-        global memory
-        chat_history = memory.load_memory_variables({})["history"]
+        chat_history = self.memory.load_memory_variables({})["history"]
 
         if chat_history:
             config.set_history(chat_history)
@@ -376,14 +374,14 @@ def chat(self, input_query, config: ChatConfig = None, dry_run=False):
 
         answer = self.get_answer_from_llm(prompt, config)
 
-        memory.chat_memory.add_user_message(input_query)
+        self.memory.chat_memory.add_user_message(input_query)
 
         # Send anonymous telemetry
         thread_telemetry = threading.Thread(target=self._send_telemetry_event, args=("chat",))
         thread_telemetry.start()
 
         if isinstance(answer, str):
-            memory.chat_memory.add_ai_message(answer)
+            self.memory.chat_memory.add_ai_message(answer)
             logging.info(f"Answer: {answer}")
             return answer
         else:
@@ -395,7 +393,7 @@ def _stream_chat_response(self, answer):
         for chunk in answer:
             streamed_answer = streamed_answer + chunk
             yield chunk
-        memory.chat_memory.add_ai_message(streamed_answer)
+        self.memory.chat_memory.add_ai_message(streamed_answer)
         logging.info(f"Answer: {streamed_answer}")
 
     def set_collection(self, collection_name):

0 commit comments

Comments
 (0)