Mirror of https://github.com/khoj-ai/khoj.git, synced 2024-11-23 23:48:56 +01:00
Make converse prompt more precise. Fix default arg vals in gpt methods
- Set conversation_log arg default to dict
- Increase default temperature to 0.2 for a little creativity in answering
- Make GPT be more reliable in looking at past conversations for forming response
Parent: 6c0e82b2d6
Commit: 24ddebf3ce
1 changed file with 4 additions and 4 deletions
@@ -121,7 +121,7 @@ A:{ "search-type": "notes" }"""
     return json.loads(story.strip(empty_escape_sequences))


-def converse(text, user_query, conversation_log=None, api_key=None, temperature=0):
+def converse(text, user_query, conversation_log={}, api_key=None, temperature=0.2):
     """
     Converse with user using OpenAI's ChatGPT
     """
@@ -129,9 +129,9 @@ def converse(text, user_query, conversation_log=None, api_key=None, temperature=
     model = "gpt-3.5-turbo"
     openai.api_key = api_key or os.getenv("OPENAI_API_KEY")

-    personality_primer = "You are a friendly, helpful personal assistant."
+    personality_primer = "You are Khoj, a friendly, smart and helpful personal assistant."
     conversation_primer = f"""
-Using the notes and our chats as context, answer the following question.
+Using the notes and our past conversations as context, answer the following question.
 Current Date: {datetime.now().strftime("%Y-%m-%d")}

 Notes:
@@ -159,7 +159,7 @@ Question: {user_query}"""
     return story.strip(empty_escape_sequences)


-def generate_chatml_messages_with_context(user_message, system_message, conversation_log=None):
+def generate_chatml_messages_with_context(user_message, system_message, conversation_log={}):
     """Generate messages for ChatGPT with context from previous conversation"""
     # Extract Chat History for Context
     chat_logs = [f'{chat["message"]}\n\nNotes:\n{chat.get("context","")}' for chat in conversation_log.get("chat", [])]
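For context on how the changed defaults behave, here is a minimal usage sketch. The converse() signature, the {} default, and the conversation_log shape (a "chat" list whose entries carry a "message" and an optional "context") are taken from the diff above; the import path, sample notes, and sample queries are assumptions made purely for illustration.

import os

# Assumed import path; the commit page does not name the changed file.
from src.processor.conversation.gpt import converse

# Shape implied by conversation_log.get("chat", []) and the chat_logs
# comprehension: each entry has a "message" and optional supporting "context".
conversation_log = {
    "chat": [
        {
            "message": "When did I last water the ficus?",
            "context": "2023-03-02 Watered the ficus and the monstera.",
        }
    ]
}

response = converse(
    text="2023-03-02 Watered the ficus and the monstera.",  # notes retrieved for the current query
    user_query="Which other plant did I water that day?",
    conversation_log=conversation_log,    # now defaults to {} instead of None
    api_key=os.getenv("OPENAI_API_KEY"),  # converse() also falls back to this env var itself
    temperature=0.2,                      # new default; allows a little creativity in answers
)
print(response)

One likely motivation for the None to {} switch: the conversation_log.get("chat", []) lookup in generate_chatml_messages_with_context() stays safe when no history is passed, whereas calling .get() on None would raise an AttributeError.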