Pass online context from chat history to chat model for response

Previously only notes context from chat history was included.
This change includes online context from chat history for model to use
for response generation.

This can reduce the need for online lookups by reusing previous online
context for faster responses. But it will increase overall response time
when past online context is not reused, because context builds up faster
per conversation.

Unsure if inclusion of this context is preferable. If not, both the notes
and online context should be removed.
This commit is contained in:
Debanjum Singh Solanky 2024-10-22 02:32:34 -07:00
parent 0c52a1169a
commit 0847fb0102

View file

@@ -180,11 +180,15 @@ def generate_chatml_messages_with_context(
# Extract Chat History for Context
chatml_messages: List[ChatMessage] = []
for chat in conversation_log.get("chat", []):
message_context = ""
if not is_none_or_empty(chat.get("context")):
references = "\n\n".join(
{f"# File: {item['file']}\n## {item['compiled']}\n" for item in chat.get("context") or []}
)
message_context = f"{prompts.notes_conversation.format(references=references)}\n\n"
if not is_none_or_empty(chat.get("onlineContext")):
message_context += f"{prompts.online_search_conversation.format(online_results=chat.get('onlineContext'))}"
if not is_none_or_empty(chat.get("context")) or not is_none_or_empty(chat.get("onlineContext")):
reconstructed_context_message = ChatMessage(content=message_context, role="context")
chatml_messages.insert(0, reconstructed_context_message)