diff --git a/src/interface/obsidian/src/chat_view.ts b/src/interface/obsidian/src/chat_view.ts
index 18001f47..1dcea196 100644
--- a/src/interface/obsidian/src/chat_view.ts
+++ b/src/interface/obsidian/src/chat_view.ts
@@ -541,12 +541,10 @@ export class KhojChatView extends KhojPaneView {
             imageMarkdown += `${message}`;
         }
 
-        if (!images || images.length === 0) {
-            if (inferredQueries) {
-                imageMarkdown += "\n\n**Inferred Query**:";
-                for (let inferredQuery of inferredQueries) {
-                    imageMarkdown += `\n\n${inferredQuery}`;
-                }
+        if ((!images || images.length === 0) && inferredQueries) {
+            imageMarkdown += "\n\n**Inferred Query**:";
+            for (let inferredQuery of inferredQueries) {
+                imageMarkdown += `\n\n${inferredQuery}`;
             }
         }
         return imageMarkdown;
diff --git a/src/interface/web/app/components/chatMessage/chatMessage.tsx b/src/interface/web/app/components/chatMessage/chatMessage.tsx
index 49ce4c00..89c3038a 100644
--- a/src/interface/web/app/components/chatMessage/chatMessage.tsx
+++ b/src/interface/web/app/components/chatMessage/chatMessage.tsx
@@ -397,11 +397,6 @@ const ChatMessage = forwardRef((props, ref) =>
         // Prepare initial message for rendering
         let message = props.chatMessage.message;
 
-        if (props.chatMessage.intent && props.chatMessage.intent.type == "excalidraw") {
-            message = props.chatMessage.intent["inferred-queries"][0];
-            setExcalidrawData(props.chatMessage.message);
-        }
-
         if (props.chatMessage.excalidrawDiagram) {
             setExcalidrawData(props.chatMessage.excalidrawDiagram);
         }
diff --git a/src/khoj/database/models/__init__.py b/src/khoj/database/models/__init__.py
index 521eee9b..3f81fead 100644
--- a/src/khoj/database/models/__init__.py
+++ b/src/khoj/database/models/__init__.py
@@ -65,12 +65,12 @@ class PeopleAlsoAsk(PydanticBaseModel):
 
 class KnowledgeGraph(PydanticBaseModel):
     attributes: Dict[str, str]
-    description: str
-    descriptionLink: str
-    descriptionSource: str
-    imageUrl: str
+    description: Optional[str] = None
+    descriptionLink: Optional[str] = None
+    descriptionSource: Optional[str] = None
+    imageUrl: Optional[str] = None
     title: str
-    type: str
+    type: Optional[str] = None
 
 
 class OrganicContext(PydanticBaseModel):
diff --git a/src/khoj/processor/conversation/anthropic/anthropic_chat.py b/src/khoj/processor/conversation/anthropic/anthropic_chat.py
index e72146e5..0a242318 100644
--- a/src/khoj/processor/conversation/anthropic/anthropic_chat.py
+++ b/src/khoj/processor/conversation/anthropic/anthropic_chat.py
@@ -160,7 +160,7 @@ def converse_anthropic(
     generated_images: Optional[list[str]] = None,
     generated_files: List[FileAttachment] = None,
     generated_excalidraw_diagram: Optional[str] = None,
-    additional_context: Optional[List[str]] = None,
+    additional_context_for_llm_response: Optional[List[str]] = None,
     tracer: dict = {},
 ):
     """
@@ -224,7 +224,7 @@ def converse_anthropic(
         generated_excalidraw_diagram=generated_excalidraw_diagram,
         generated_files=generated_files,
         generated_images=generated_images,
-        additional_program_context=additional_context,
+        additional_context_for_llm_response=additional_context_for_llm_response,
     )
 
     messages, system_prompt = format_messages_for_anthropic(messages, system_prompt)
diff --git a/src/khoj/processor/conversation/google/gemini_chat.py b/src/khoj/processor/conversation/google/gemini_chat.py
index fc49e35f..304511ca 100644
--- a/src/khoj/processor/conversation/google/gemini_chat.py
+++ b/src/khoj/processor/conversation/google/gemini_chat.py
@@ -170,7 +170,7 @@ def converse_gemini(
     generated_images: Optional[list[str]] = None,
     generated_files: List[FileAttachment] = None,
     generated_excalidraw_diagram: Optional[str] = None,
-    additional_context: List[str] = None,
+    additional_context_for_llm_response: List[str] = None,
     tracer={},
 ):
     """
@@ -235,7 +235,7 @@ def converse_gemini(
         generated_excalidraw_diagram=generated_excalidraw_diagram,
         generated_files=generated_files,
         generated_images=generated_images,
-        additional_program_context=additional_context,
+        additional_context_for_llm_response=additional_context_for_llm_response,
     )
 
     messages, system_prompt = format_messages_for_gemini(messages, system_prompt)
diff --git a/src/khoj/processor/conversation/offline/chat_model.py b/src/khoj/processor/conversation/offline/chat_model.py
index d493dd30..853f95ec 100644
--- a/src/khoj/processor/conversation/offline/chat_model.py
+++ b/src/khoj/processor/conversation/offline/chat_model.py
@@ -234,7 +234,7 @@ def converse_offline(
         model_type=ChatModelOptions.ModelType.OFFLINE,
         query_files=query_files,
         generated_files=generated_files,
-        additional_program_context=additional_context,
+        additional_context_for_llm_response=additional_context,
     )
 
     logger.debug(f"Conversation Context for {model}: {messages_to_print(messages)}")
diff --git a/src/khoj/processor/conversation/openai/gpt.py b/src/khoj/processor/conversation/openai/gpt.py
index 9cfb9620..518e655d 100644
--- a/src/khoj/processor/conversation/openai/gpt.py
+++ b/src/khoj/processor/conversation/openai/gpt.py
@@ -160,7 +160,7 @@ def converse(
     generated_images: Optional[list[str]] = None,
     generated_files: List[FileAttachment] = None,
     generated_excalidraw_diagram: Optional[str] = None,
-    additional_context: List[str] = None,
+    additional_context_for_llm_response: List[str] = None,
     tracer: dict = {},
 ):
     """
@@ -226,7 +226,7 @@ def converse(
         generated_excalidraw_diagram=generated_excalidraw_diagram,
         generated_files=generated_files,
         generated_images=generated_images,
-        additional_program_context=additional_context,
+        additional_context_for_llm_response=additional_context_for_llm_response,
     )
 
     logger.debug(f"Conversation Context for GPT: {messages_to_print(messages)}")
diff --git a/src/khoj/processor/conversation/prompts.py b/src/khoj/processor/conversation/prompts.py
index 5e28a912..46dac655 100644
--- a/src/khoj/processor/conversation/prompts.py
+++ b/src/khoj/processor/conversation/prompts.py
@@ -186,9 +186,7 @@ Here is the image you generated based on my query. You can follow-up with a gene
 
 generated_diagram_attachment = PromptTemplate.from_template(
     f"""
-The AI has successfully created a diagram based on the user's query and handled the request. Good job! This will be shared with the user.
-
-AI can follow-up with a general response or summary. Limit to 1-2 sentences.
+I've successfully created a diagram based on the user's query. The diagram will automatically be shared with the user. I can follow-up with a general response or summary. Limit to 1-2 sentences.
 """.strip()
 )
diff --git a/src/khoj/processor/conversation/utils.py b/src/khoj/processor/conversation/utils.py
index 64d42716..9a7cb24b 100644
--- a/src/khoj/processor/conversation/utils.py
+++ b/src/khoj/processor/conversation/utils.py
@@ -383,7 +383,7 @@ def generate_chatml_messages_with_context(
     generated_images: Optional[list[str]] = None,
     generated_files: List[FileAttachment] = None,
     generated_excalidraw_diagram: str = None,
-    additional_program_context: List[str] = [],
+    additional_context_for_llm_response: List[str] = [],
 ):
     """Generate chat messages with appropriate context from previous conversation to send to the chat model"""
     # Set max prompt size from user config or based on pre-configured for model and machine specs
@@ -484,10 +484,12 @@ def generate_chatml_messages_with_context(
     if generated_excalidraw_diagram:
         messages.append(ChatMessage(content=prompts.generated_diagram_attachment.format(), role="assistant"))
 
-    if additional_program_context:
+    if additional_context_for_llm_response:
         messages.append(
             ChatMessage(
-                content=prompts.additional_program_context.format(context="\n".join(additional_program_context)),
+                content=prompts.additional_program_context.format(
+                    context="\n".join(additional_context_for_llm_response)
+                ),
                 role="assistant",
             )
         )
diff --git a/src/khoj/routers/helpers.py b/src/khoj/routers/helpers.py
index d54ae4f7..7d61752c 100644
--- a/src/khoj/routers/helpers.py
+++ b/src/khoj/routers/helpers.py
@@ -1188,7 +1188,7 @@ def generate_chat_response(
     generated_images: List[str] = None,
     raw_generated_files: List[FileAttachment] = [],
     generated_excalidraw_diagram: str = None,
-    additional_context: List[str] = [],
+    additional_context_for_llm_response: List[str] = [],
     tracer: dict = {},
 ) -> Tuple[Union[ThreadedGenerator, Iterator[str]], Dict[str, str]]:
     # Initialize Variables
@@ -1280,7 +1280,7 @@ def generate_chat_response(
                 generated_files=raw_generated_files,
                 generated_images=generated_images,
                 generated_excalidraw_diagram=generated_excalidraw_diagram,
-                additional_context=additional_context,
+                additional_context_for_llm_response=additional_context_for_llm_response,
                 tracer=tracer,
             )
 
@@ -1307,7 +1307,7 @@ def generate_chat_response(
                 generated_files=raw_generated_files,
                 generated_images=generated_images,
                 generated_excalidraw_diagram=generated_excalidraw_diagram,
-                additional_context=additional_context,
+                additional_context_for_llm_response=additional_context_for_llm_response,
                 tracer=tracer,
             )
         elif conversation_config.model_type == ChatModelOptions.ModelType.GOOGLE:
@@ -1333,7 +1333,7 @@ def generate_chat_response(
                 generated_files=raw_generated_files,
                 generated_images=generated_images,
                 generated_excalidraw_diagram=generated_excalidraw_diagram,
-                additional_context=additional_context,
+                additional_context_for_llm_response=additional_context_for_llm_response,
                 tracer=tracer,
             )