@@ -264,7 +264,10 @@ def complete_stream(
            doc["source"] = "None"

    llm = LLMCreator.create_llm(
-        settings.LLM_NAME, api_key=settings.API_KEY, user_api_key=user_api_key
+        settings.LLM_NAME,
+        api_key=settings.API_KEY,
+        user_api_key=user_api_key,
+        decoded_token=decoded_token,
    )

    if should_save_conversation:
@@ -420,6 +423,7 @@ def post(self):
            user_api_key=user_api_key,
            prompt=prompt,
            chat_history=history,
+            decoded_token=decoded_token,
        )

        retriever = RetrieverCreator.create_retriever(
@@ -431,6 +435,7 @@ def post(self):
            token_limit=token_limit,
            gpt_model=gpt_model,
            user_api_key=user_api_key,
+            decoded_token=decoded_token,
        )

        return Response(
@@ -565,6 +570,7 @@ def post(self):
            user_api_key=user_api_key,
            prompt=prompt,
            chat_history=history,
+            decoded_token=decoded_token,
        )

        retriever = RetrieverCreator.create_retriever(
@@ -576,6 +582,7 @@ def post(self):
            token_limit=token_limit,
            gpt_model=gpt_model,
            user_api_key=user_api_key,
+            decoded_token=decoded_token,
        )

        response_full = ""
@@ -623,7 +630,10 @@ def post(self):
                doc["source"] = "None"

        llm = LLMCreator.create_llm(
-            settings.LLM_NAME, api_key=settings.API_KEY, user_api_key=user_api_key
+            settings.LLM_NAME,
+            api_key=settings.API_KEY,
+            user_api_key=user_api_key,
+            decoded_token=decoded_token,
        )

        result = {"answer": response_full, "sources": source_log_docs}
@@ -743,6 +753,7 @@ def post(self):
            token_limit=token_limit,
            gpt_model=gpt_model,
            user_api_key=user_api_key,
+            decoded_token=decoded_token,
        )

        docs = retriever.search(question)