From b9a889ab697f68c544ddbef2f5fd31f6f3df8427 Mon Sep 17 00:00:00 2001 From: Debanjum Date: Thu, 21 Nov 2024 14:31:58 -0800 Subject: [PATCH] Fix Khoj responses when code generated charts in response context This fix should improve Khoj responses when charts are in the response context. It truncates code context before sharing it with the response chat actors. Previously, Khoj would respond that it was not able to create a chart but then have a generated chart in its response in default mode. The truncate code context logic was added to the research chat actor for decision making, but it wasn't added to the conversation response generation chat actors. When Khoj generated charts with code for its response, the images in the context would exceed context window limits. The truncation logic would then drop all past context, including chat history and context gathered for the current response. This would result in the chat response generator 'forgetting' everything for the current response when code generated images or charts were in the response context. 
--- .../conversation/anthropic/anthropic_chat.py | 10 ++++++++-- src/khoj/processor/conversation/google/gemini_chat.py | 10 ++++++++-- src/khoj/processor/conversation/offline/chat_model.py | 5 ++++- src/khoj/processor/conversation/openai/gpt.py | 11 +++++++++-- 4 files changed, 29 insertions(+), 7 deletions(-) diff --git a/src/khoj/processor/conversation/anthropic/anthropic_chat.py b/src/khoj/processor/conversation/anthropic/anthropic_chat.py index 57fc9b23..65e28d21 100644 --- a/src/khoj/processor/conversation/anthropic/anthropic_chat.py +++ b/src/khoj/processor/conversation/anthropic/anthropic_chat.py @@ -19,7 +19,11 @@ from khoj.processor.conversation.utils import ( generate_chatml_messages_with_context, messages_to_print, ) -from khoj.utils.helpers import ConversationCommand, is_none_or_empty +from khoj.utils.helpers import ( + ConversationCommand, + is_none_or_empty, + truncate_code_context, +) from khoj.utils.rawconfig import LocationData from khoj.utils.yaml import yaml_dump @@ -197,7 +201,9 @@ def converse_anthropic( if ConversationCommand.Online in conversation_commands or ConversationCommand.Webpage in conversation_commands: context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n" if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results): - context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n" + context_message += ( + f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n" + ) context_message = context_message.strip() # Setup Prompt with Primer or Conversation History diff --git a/src/khoj/processor/conversation/google/gemini_chat.py b/src/khoj/processor/conversation/google/gemini_chat.py index 5bb74da4..965d3010 100644 --- a/src/khoj/processor/conversation/google/gemini_chat.py +++ b/src/khoj/processor/conversation/google/gemini_chat.py @@ -19,7 +19,11 @@ from 
khoj.processor.conversation.utils import ( generate_chatml_messages_with_context, messages_to_print, ) -from khoj.utils.helpers import ConversationCommand, is_none_or_empty +from khoj.utils.helpers import ( + ConversationCommand, + is_none_or_empty, + truncate_code_context, +) from khoj.utils.rawconfig import LocationData from khoj.utils.yaml import yaml_dump @@ -208,7 +212,9 @@ def converse_gemini( if ConversationCommand.Online in conversation_commands or ConversationCommand.Webpage in conversation_commands: context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n" if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results): - context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n" + context_message += ( + f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n" + ) context_message = context_message.strip() # Setup Prompt with Primer or Conversation History diff --git a/src/khoj/processor/conversation/offline/chat_model.py b/src/khoj/processor/conversation/offline/chat_model.py index 66660c43..b1ab77fe 100644 --- a/src/khoj/processor/conversation/offline/chat_model.py +++ b/src/khoj/processor/conversation/offline/chat_model.py @@ -24,6 +24,7 @@ from khoj.utils.helpers import ( in_debug_mode, is_none_or_empty, is_promptrace_enabled, + truncate_code_context, ) from khoj.utils.rawconfig import LocationData from khoj.utils.yaml import yaml_dump @@ -211,7 +212,9 @@ def converse_offline( context_message += f"{prompts.online_search_conversation_offline.format(online_results=yaml_dump(simplified_online_results))}\n\n" if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results): - context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n" + context_message += ( + 
f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n" + ) context_message = context_message.strip() # Setup Prompt with Primer or Conversation History diff --git a/src/khoj/processor/conversation/openai/gpt.py b/src/khoj/processor/conversation/openai/gpt.py index 13b53911..e525fa75 100644 --- a/src/khoj/processor/conversation/openai/gpt.py +++ b/src/khoj/processor/conversation/openai/gpt.py @@ -17,7 +17,11 @@ from khoj.processor.conversation.utils import ( generate_chatml_messages_with_context, messages_to_print, ) -from khoj.utils.helpers import ConversationCommand, is_none_or_empty +from khoj.utils.helpers import ( + ConversationCommand, + is_none_or_empty, + truncate_code_context, +) from khoj.utils.rawconfig import LocationData from khoj.utils.yaml import yaml_dump @@ -196,7 +200,10 @@ def converse( if not is_none_or_empty(online_results): context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n" if not is_none_or_empty(code_results): - context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n" + context_message += ( + f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n" + ) + context_message = context_message.strip() # Setup Prompt with Primer or Conversation History