Mirror of https://github.com/khoj-ai/khoj.git (synced 2024-12-18 18:47:11 +00:00)
Don't include null attributes when filling in stored conversation metadata
- Prompt adjustments to indicate to the LLM what context it has
parent c5329d76ba · commit a0b00ce4a1
3 changed files with 25 additions and 20 deletions
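The core change in save_to_conversation_log (third hunk below) is to build the metadata dicts up front and attach optional attributes only when they actually hold a value, so stored conversation logs no longer carry null keys. A minimal, self-contained sketch of that pattern, using a hypothetical helper name rather than the project's code:

from typing import Any, Dict, List, Optional

def build_message_metadata(
    created: str,
    turn_id: str,
    diagram: Optional[List[Dict]] = None,
) -> Dict[str, Any]:
    # Always-present attributes go in unconditionally.
    metadata: Dict[str, Any] = {"created": created, "turnId": turn_id}
    # Optional attributes are added only when they have a value,
    # so no null keys end up in the stored conversation log.
    if diagram:
        metadata["excalidrawDiagram"] = diagram
    return metadata

print(build_message_metadata("2024-12-18 10:00:00", "turn-1"))                      # no excalidrawDiagram key
print(build_message_metadata("2024-12-18 10:00:00", "turn-2", [{"type": "ellipse"}]))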
@@ -108,7 +108,7 @@ class ChatMessage(PydanticBaseModel):
     created: str
     images: Optional[List[str]] = None
     queryFiles: Optional[List[Dict]] = None
-    excalidrawDiagram: Optional[str] = None
+    excalidrawDiagram: Optional[List[Dict]] = None
     by: str
     turnId: Optional[str]
     intent: Optional[Intent] = None
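For reference, a minimal sketch of what the updated excalidrawDiagram field accepts after this change: a list of diagram element dicts instead of a single string. The class name is hypothetical and pydantic v2 is assumed; this is not the project's actual model file.

from typing import Dict, List, Optional
from pydantic import BaseModel

class ChatMessageSketch(BaseModel):
    created: str
    excalidrawDiagram: Optional[List[Dict]] = None  # was Optional[str] before this commit

msg = ChatMessageSketch(
    created="2024-12-18 10:00:00",
    excalidrawDiagram=[{"type": "rectangle", "x": 0, "y": 0, "width": 120, "height": 60}],
)
print(msg.model_dump(exclude_none=True))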
@@ -186,7 +186,7 @@ Here is the image you generated based on my query. You can follow-up with a gene

 generated_diagram_attachment = PromptTemplate.from_template(
     f"""
-The AI has successfully created a diagram based on the user's query and handled the request. Good job!
+The AI has successfully created a diagram based on the user's query and handled the request. Good job! This will be shared with the user.

 AI can follow-up with a general response or summary. Limit to 1-2 sentences.
 """.strip()
@@ -276,27 +276,32 @@ def save_to_conversation_log(
 ):
     user_message_time = user_message_time or datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     turn_id = tracer.get("mid") or str(uuid.uuid4())
+
+    user_message_metadata = {"created": user_message_time, "images": query_images, "turnId": turn_id}
+
+    if raw_query_files and len(raw_query_files) > 0:
+        user_message_metadata["queryFiles"] = [file.model_dump(mode="json") for file in raw_query_files]
+
+    khoj_message_metadata = {
+        "context": compiled_references,
+        "intent": {"inferred-queries": inferred_queries, "type": intent_type},
+        "onlineContext": online_results,
+        "codeContext": code_results,
+        "automationId": automation_id,
+        "trainOfThought": train_of_thought,
+        "turnId": turn_id,
+        "images": generated_images,
+        "queryFiles": [file.model_dump(mode="json") for file in raw_generated_files],
+    }
+
+    if generated_excalidraw_diagram:
+        khoj_message_metadata["excalidrawDiagram"] = generated_excalidraw_diagram
+
     updated_conversation = message_to_log(
         user_message=q,
         chat_response=chat_response,
-        user_message_metadata={
-            "created": user_message_time,
-            "images": query_images,
-            "turnId": turn_id,
-            "queryFiles": [file.model_dump(mode="json") for file in raw_query_files],
-        },
-        khoj_message_metadata={
-            "context": compiled_references,
-            "intent": {"inferred-queries": inferred_queries, "type": intent_type},
-            "onlineContext": online_results,
-            "codeContext": code_results,
-            "automationId": automation_id,
-            "trainOfThought": train_of_thought,
-            "turnId": turn_id,
-            "images": generated_images,
-            "queryFiles": [file.model_dump(mode="json") for file in raw_generated_files],
-            "excalidrawDiagram": str(generated_excalidraw_diagram) if generated_excalidraw_diagram else None,
-        },
+        user_message_metadata=user_message_metadata,
+        khoj_message_metadata=khoj_message_metadata,
         conversation_log=meta_log.get("chat", []),
     )
     ConversationAdapters.save_conversation(