Mirror of https://github.com/khoj-ai/khoj.git, synced 2024-11-27 17:35:07 +01:00
Put context into separate message before sending to offline chat model

Align context passed to offline chat model with other chat models
- Pass context in separate message for better separation between user query and the shared context
- Pass filename in context
- Add online results for webpage conversation command
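The change below stops prepending notes and online results to the user query string and instead hands them to generate_chatml_messages_with_context as a dedicated context_message. A minimal sketch of the intended message layout, assuming the context ends up in its own chat message just before the query (the exact role and ordering are decided inside that helper and are not shown in this diff):

    # Illustrative only: the strings are made up and the role used for the context
    # message is an assumption; generate_chatml_messages_with_context decides the
    # real layout.
    system_prompt = "You are Khoj, a helpful personal assistant."
    notes_context = "# File: notes/lisbon.md\n## Visited Lisbon in May; loved the trams.\n"
    online_context = "Online results relevant to the query."
    user_query = "What did I write about my trip to Lisbon?"

    # Before this change: shared context was folded into the user query string.
    old_messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"{notes_context}\n\n{online_context}\n{user_query}"},
    ]

    # After this change: context travels as its own message, giving the model a
    # clear boundary between reference material and the actual question.
    new_messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"{notes_context}\n\n{online_context}"},
        {"role": "user", "content": user_query},
    ]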
parent ee0789eb3d
commit 8ddd70f3a9

1 changed file with 11 additions and 9 deletions
@@ -153,7 +153,7 @@ def converse_offline(
     # Initialize Variables
     assert loaded_model is None or isinstance(loaded_model, Llama), "loaded_model must be of type Llama, if configured"
     offline_chat_model = loaded_model or download_model(model, max_tokens=max_prompt_size)
-    compiled_references_message = "\n\n".join({f"{item['compiled']}" for item in references})
+    compiled_references = "\n\n".join({f"# File: {item['file']}\n## {item['compiled']}\n" for item in references})
 
     current_date = datetime.now()
 
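The rewritten compiled_references line above now prefixes each reference with its source filename. A small self-contained example of what that produces, using made-up reference entries that only share the 'file' and 'compiled' keys with the real data:

    # Made-up reference entries; only the 'file' and 'compiled' keys mirror the diff.
    references = [
        {"file": "notes/lisbon.md", "compiled": "Visited Lisbon in May; loved the trams."},
        {"file": "journal.org", "compiled": "Booked flights for the Lisbon trip."},
    ]

    # Same expression as the new line in the hunk above. It is a set comprehension,
    # so duplicate entries collapse and ordering is not guaranteed.
    compiled_references = "\n\n".join({f"# File: {item['file']}\n## {item['compiled']}\n" for item in references})

    # Each entry now carries a "# File: <name>" header above its compiled snippet,
    # e.g. "# File: journal.org" followed by "## Booked flights for the Lisbon trip."
    print(compiled_references)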
@@ -170,8 +170,6 @@ def converse_offline(
         day_of_week=current_date.strftime("%A"),
     )
 
-    conversation_primer = prompts.query_prompt.format(query=user_query)
-
     if location_data:
         location_prompt = prompts.user_location.format(location=f"{location_data}")
         system_prompt = f"{system_prompt}\n{location_prompt}"
@@ -181,27 +179,31 @@ def converse_offline(
         system_prompt = f"{system_prompt}\n{user_name_prompt}"
 
     # Get Conversation Primer appropriate to Conversation Type
-    if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(compiled_references_message):
+    if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(compiled_references):
         return iter([prompts.no_notes_found.format()])
     elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
         completion_func(chat_response=prompts.no_online_results_found.format())
         return iter([prompts.no_online_results_found.format()])
 
-    if ConversationCommand.Online in conversation_commands:
+    context_message = ""
+    if not is_none_or_empty(compiled_references):
+        context_message += f"{prompts.notes_conversation_offline.format(references=compiled_references)}\n\n"
+    if ConversationCommand.Online in conversation_commands or ConversationCommand.Webpage in conversation_commands:
         simplified_online_results = online_results.copy()
         for result in online_results:
             if online_results[result].get("webpages"):
                 simplified_online_results[result] = online_results[result]["webpages"]
 
-        conversation_primer = f"{prompts.online_search_conversation_offline.format(online_results=str(simplified_online_results))}\n{conversation_primer}"
-    if not is_none_or_empty(compiled_references_message):
-        conversation_primer = f"{prompts.notes_conversation_offline.format(references=compiled_references_message)}\n\n{conversation_primer}"
+        context_message += (
+            f"{prompts.online_search_conversation_offline.format(online_results=str(simplified_online_results))}"
+        )
 
     # Setup Prompt with Primer or Conversation History
     messages = generate_chatml_messages_with_context(
-        conversation_primer,
+        user_query,
         system_prompt,
         conversation_log,
+        context_message=context_message,
         model_name=model,
         loaded_model=offline_chat_model,
         max_prompt_size=max_prompt_size,
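With the Webpage conversation command now included, online_results also reach the context message. The loop above collapses any query whose results carry extracted webpages down to just those webpages; a sketch with a made-up online_results payload (the real structure comes from Khoj's online search pipeline and may have more fields):

    # Made-up payload; the per-result field names are illustrative.
    online_results = {
        "lisbon weather": {
            "organic": [{"title": "Weather in Lisbon", "link": "https://example.com/weather"}],
            "webpages": [{"link": "https://example.com/lisbon", "snippet": "Sunny, 24°C"}],
        },
        "tram 28 route": {
            "organic": [{"title": "Tram 28 guide", "link": "https://example.org/tram28"}],
        },
    }

    # Same simplification as in the hunk above: queries that already have extracted
    # webpages are reduced to just those webpages before being formatted into the
    # context message, keeping the prompt shorter for the offline model.
    simplified_online_results = online_results.copy()
    for result in online_results:
        if online_results[result].get("webpages"):
            simplified_online_results[result] = online_results[result]["webpages"]

    # "lisbon weather" now maps to its webpages list; "tram 28 route" is unchanged.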