Skip to content

Commit 1790140

Browse files
committed
Gracefully close thread when there's an exception in the openai llm thread. Closes #894.
1 parent 6ed68b5 commit 1790140

File tree

1 file changed

+32
-29
lines changed
  • src/khoj/processor/conversation/openai

1 file changed

+32
-29
lines changed

src/khoj/processor/conversation/openai/utils.py

+32-29
Original file line numberDiff line numberDiff line change
@@ -100,34 +100,37 @@ def chat_completion_with_backoff(
100100

101101

102102
def llm_thread(g, messages, model_name, temperature, openai_api_key=None, api_base_url=None, model_kwargs=None):
    """Stream chat completions from an OpenAI-compatible API into generator *g*.

    Runs on a worker thread: every streamed content delta is pushed to the
    consumer via ``g.send(...)``, and ``g.close()`` always runs (success or
    failure) so the consumer is never left blocked on a dead thread.

    Args:
        g: Thread-safe generator that receives response chunks via ``send``.
        messages: Chat messages; each must expose ``.role`` and ``.content``.
        model_name: Model identifier passed to the completions API.
        temperature: Sampling temperature forwarded to the API.
        openai_api_key: API key; ``None`` defers to the client library's
            environment lookup.
        api_base_url: Optional alternate endpoint for OpenAI-compatible servers.
        model_kwargs: Extra keyword arguments forwarded to ``completions.create``.
    """
    try:
        # Reuse one client per (api-key, base-url) pair so connection pools
        # are not rebuilt on every request.
        client_key = f"{openai_api_key}--{api_base_url}"
        client = openai_clients.get(client_key)
        if client is None:
            client = openai.OpenAI(api_key=openai_api_key, base_url=api_base_url)
            openai_clients[client_key] = client

        formatted_messages = [{"role": message.role, "content": message.content} for message in messages]

        chat = client.chat.completions.create(
            stream=True,
            messages=formatted_messages,
            model=model_name,  # type: ignore
            temperature=temperature,
            timeout=20,
            **(model_kwargs or {}),
        )

        for chunk in chat:
            # Some stream events (e.g. usage-only frames) carry no choices.
            if not chunk.choices:
                continue
            delta_chunk = chunk.choices[0].delta
            if isinstance(delta_chunk, str):
                g.send(delta_chunk)
            elif delta_chunk.content:
                g.send(delta_chunk.content)
    except Exception:
        # Top-level boundary of this worker thread: log with the full
        # traceback (logger.exception) instead of just the exception repr,
        # so failures in the stream are debuggable.
        logger.exception("Error in llm_thread")
    finally:
        # Always close so the consuming generator terminates even on error;
        # this is the fix for the thread hanging after an exception.
        g.close()

0 commit comments

Comments
 (0)