From 5927ca803277901590466ac3deb621f5a0e67210 Mon Sep 17 00:00:00 2001 From: Debanjum Singh Solanky Date: Fri, 23 Aug 2024 02:06:26 -0700 Subject: [PATCH] Properly close chat stream iterator even if response generation fails Previously, the chat stream iterator wasn't closed when response streaming for the offline chat model threw an exception. This would require restarting the application. Now the application doesn't hang even if the current response generation fails with an exception --- src/khoj/processor/conversation/offline/chat_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/khoj/processor/conversation/offline/chat_model.py b/src/khoj/processor/conversation/offline/chat_model.py index cc8d7702..8a58e181 100644 --- a/src/khoj/processor/conversation/offline/chat_model.py +++ b/src/khoj/processor/conversation/offline/chat_model.py @@ -224,7 +224,7 @@ def llm_thread(g, messages: List[ChatMessage], model: Any, max_prompt_size: int g.send(response["choices"][0]["delta"].get("content", "")) finally: state.chat_lock.release() - g.close() + g.close() def send_message_to_model_offline(