Mirror of https://github.com/khoj-ai/khoj.git, synced 2025-02-17 16:14:21 +00:00
Only use past chat messages, not session summaries, as chat context
Passing chat messages only for the currently active session, and summaries for past sessions, isn't currently as useful
parent ad1f1cf620
commit 7cad1c9428
2 changed files with 5 additions and 15 deletions
src/khoj
@@ -114,7 +114,7 @@ A:{ "search-type": "notes" }"""
     return json.loads(story.strip(empty_escape_sequences))
 
 
-def converse(text, user_query, active_session_length=0, conversation_log=None, api_key=None, temperature=0):
+def converse(text, user_query, conversation_log=None, api_key=None, temperature=0):
     """
     Converse with user using OpenAI's ChatGPT
     """
@@ -135,7 +135,6 @@ Question: {user_query}"""
     messages = generate_chatml_messages_with_context(
         conversation_primer,
         personality_primer,
-        active_session_length,
         conversation_log,
     )
 
@@ -151,20 +150,12 @@ Question: {user_query}"""
     return story.strip(empty_escape_sequences)
 
 
-def generate_chatml_messages_with_context(user_message, system_message, active_session_length=0, conversation_log=None):
+def generate_chatml_messages_with_context(user_message, system_message, conversation_log=None):
     """Generate messages for ChatGPT with context from previous conversation"""
     # Extract Chat History for Context
     chat_logs = [chat["message"] for chat in conversation_log.get("chat", [])]
-    session_summaries = [session["summary"] for session in conversation_log.get("session", {})]
-    if active_session_length == 0:
-        last_backnforth = list(map(message_to_chatml, session_summaries[-1:]))
-        rest_backnforth = list(map(message_to_chatml, session_summaries[-2:-1]))
-    elif active_session_length == 1:
-        last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])
-        rest_backnforth = list(map(message_to_chatml, session_summaries[-1:]))
-    else:
-        last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])
-        rest_backnforth = reciprocal_conversation_to_chatml(chat_logs[-4:-2])
+    last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])
+    rest_backnforth = reciprocal_conversation_to_chatml(chat_logs[-4:-2])
 
     # Format user and system messages to chatml format
     system_chatml_message = [message_to_chatml(system_message, "system")]
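The simplified generate_chatml_messages_with_context now relies only on the two helpers already referenced in the hunk above, message_to_chatml and reciprocal_conversation_to_chatml, whose implementations are not part of this diff. Below is a minimal sketch of what they plausibly look like, inferred solely from how they are called here; treat the bodies as assumptions rather than the repository's actual code.

def message_to_chatml(message, role="assistant"):
    # Wrap a plain message string into the ChatML-style dict the OpenAI chat API expects
    return {"role": role, "content": message}


def reciprocal_conversation_to_chatml(message_pair):
    # Convert a [user_message, ai_message] back-and-forth into a pair of ChatML messages
    return [message_to_chatml(message, role) for message, role in zip(message_pair, ["user", "assistant"])]


# With helpers of this shape, the retained lines turn the last two user/AI
# back-and-forths in the chat log into ChatML context:
chat_logs = ["how are you?", "I am well, thanks!", "what did I ask before?", "You asked how I am."]
last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])    # most recent exchange
rest_backnforth = reciprocal_conversation_to_chatml(chat_logs[-4:-2])  # exchange before that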
@@ -88,7 +88,6 @@ def chat(q: Optional[str] = None):
     # Load Conversation History
     chat_session = state.processor_config.conversation.chat_session
     meta_log = state.processor_config.conversation.meta_log
-    active_session_length = len(chat_session.split("\nAI:")) - 1 if chat_session else 0
 
     # If user query is empty, return chat history
     if not q:
@@ -103,7 +102,7 @@ def chat(q: Optional[str] = None):
     logger.debug(f"Reference Context:\n{collated_result}")
 
     try:
-        gpt_response = converse(collated_result, q, active_session_length, meta_log, api_key=api_key)
+        gpt_response = converse(collated_result, q, meta_log, api_key=api_key)
         status = "ok"
     except Exception as e:
         gpt_response = str(e)
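After this change the chat endpoint no longer computes or forwards active_session_length; the context converse receives is driven entirely by the chat entries in meta_log. A hedged usage sketch of the new call follows, with a hypothetical meta_log whose shape is inferred only from conversation_log.get("chat", []) and chat["message"] in the first file; the real log likely stores more metadata per entry.

# Hypothetical values for illustration only
meta_log = {
    "chat": [
        {"message": "What did I plan for the weekend?"},  # user turn
        {"message": "You planned a hike on Saturday."},   # AI turn
    ]
}
collated_result = "Notes: hike on Saturday, brunch on Sunday"  # reference context from search
q = "Remind me what I planned"
api_key = "sk-..."  # OpenAI API key

# New signature: no active_session_length argument
gpt_response = converse(collated_result, q, meta_log, api_key=api_key)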