Introduce improved answer API and prompt. Use by default in chat web interface

- Improve the GPT prompt:
  - Make GPT answer the user's query based on the provided notes instead of summarizing the provided notes
  - Make GPT truthful via the prompt and a reduced temperature
  - Use the official OpenAI Q&A prompt from the cookbook as a starting reference
- Replace the summarize API with the improved answer API endpoint
- Default to the answer type in the chat web interface. The chat type is not fit for default consumption yet
This commit is contained in:
parent 7184508784
commit c3b624e351

3 changed files with 35 additions and 8 deletions
Chat web interface (HTML/JS):

@@ -50,9 +50,7 @@
     document.getElementById("chat-input").value = "";

     // Generate backend API URL to execute query
-    url = type_ === "chat"
-        ? `/api/beta/chat?q=${encodeURIComponent(query)}`
-        : `/api/beta/summarize?q=${encodeURIComponent(query)}`;
+    url = `/api/beta/${type_}?q=${encodeURIComponent(query)}`;

     // Call specified Khoj API
     fetch(url)

@@ -112,8 +110,8 @@

     <!--Select Chat Type from: Chat, Summarize -->
     <select id="chat-type" class="option" onchange="setTypeFieldInUrl(this)">
+        <option value="answer">Answer</option>
         <option value="chat">Chat</option>
-        <option value="summarize">Summarize</option>
     </select>
 </div>
 </body>
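For reference, a minimal Python sketch (not part of the commit; the helper name is illustrative) of the URL mapping the simplified JavaScript now performs, where every dropdown value resolves directly to the matching beta API route:

    from urllib.parse import quote

    def build_khoj_url(type_: str, query: str) -> str:
        # Rough analogue of the template string plus encodeURIComponent:
        # every chat type maps directly onto /api/beta/<type>
        return f"/api/beta/{type_}?q={quote(query, safe='')}"

    assert build_khoj_url("answer", "what color is my bike?").startswith("/api/beta/answer?q=")
    assert build_khoj_url("chat", "hello") == "/api/beta/chat?q=hello"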
GPT conversation processor (khoj.processor.conversation.gpt):

@@ -10,6 +10,34 @@ import openai
 from khoj.utils.constants import empty_escape_sequences


+def answer(text, user_query, model, api_key=None, temperature=0.3, max_tokens=200):
+    """
+    Answer user query using provided text as reference with OpenAI's GPT
+    """
+    # Initialize Variables
+    openai.api_key = api_key or os.getenv("OPENAI_API_KEY")
+
+    # Setup prompt to answer the user's query from the provided notes
+    prompt = f"""
+You are a friendly, helpful personal assistant.
+Using the users notes below, answer their following question. If the answer is not contained within the notes, say "I don't know."
+
+Notes:
+{text}
+
+Question: {user_query}
+
+Answer (in second person):"""
+
+    # Get Response from GPT
+    response = openai.Completion.create(
+        prompt=prompt, model=model, temperature=temperature, max_tokens=max_tokens, stop='"""'
+    )
+
+    # Extract, Clean Message from GPT's Response
+    story = response["choices"][0]["text"]
+    return str(story).replace("\n\n", "")
+
+
 def summarize(text, summary_type, model, user_query=None, api_key=None, temperature=0.5, max_tokens=200):
     """
     Summarize user input using OpenAI's GPT
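As a usage illustration only (not part of the commit), a minimal sketch of calling the new answer function directly; the note text, question, and model name below are made up, and the legacy openai.Completion API used in the diff is assumed to be available:

    from khoj.processor.conversation.gpt import answer

    # Hypothetical note text and question, just to show the call shape
    notes = "Bought a blue Marin mountain bike from REI in March 2021."
    reply = answer(
        text=notes,
        user_query="What color is my bike?",
        model="text-davinci-002",  # assumed completion-style model name
        api_key=None,              # falls back to the OPENAI_API_KEY environment variable
    )
    print(reply)  # illustrative output, e.g. "Your bike is blue."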
Beta API router:

@@ -10,6 +10,7 @@ from fastapi import APIRouter
 # Internal Packages
 from khoj.routers.api import search
 from khoj.processor.conversation.gpt import (
+    answer,
     converse,
     extract_search_type,
     message_to_log,

@@ -48,8 +49,8 @@ def search_beta(q: str, n: Optional[int] = 1):
     return {"status": "ok", "result": search_results, "type": search_type}


-@api_beta.get("/summarize")
-def summarize_beta(q: str):
+@api_beta.get("/answer")
+def answer_beta(q: str):
     # Initialize Variables
     model = state.processor_config.conversation.model
     api_key = state.processor_config.conversation.openai_api_key

@@ -61,9 +62,9 @@ def summarize_beta(q: str):
     # Converse with OpenAI GPT
     result_list = search(q, n=1, r=True)
     collated_result = "\n".join([item.entry for item in result_list])
-    logger.debug(f"Semantically Similar Notes:\n{collated_result}")
+    logger.debug(f"Reference Notes:\n{collated_result}")
     try:
-        gpt_response = summarize(collated_result, summary_type="notes", user_query=q, model=model, api_key=api_key)
+        gpt_response = answer(collated_result, user_query=q, model=model, api_key=api_key)
         status = "ok"
     except Exception as e:
         gpt_response = str(e)
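For completeness, a hedged sketch of exercising the new endpoint over HTTP once the Khoj server is running; the host and port are assumptions (a local development server), not something this diff specifies, and the exact shape of the JSON response is not shown in these hunks:

    import requests

    # Query the new answer API with a question grounded in the indexed notes
    response = requests.get(
        "http://localhost:8000/api/beta/answer",
        params={"q": "What color is my bike?"},
    )
    print(response.json())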