Move the chat API out of beta. Save chat sessions at 15min intervals

Debanjum Singh Solanky 2023-03-10 17:20:52 -06:00
parent e16d0b6d7e
commit a71f168273
4 changed files with 78 additions and 82 deletions


@@ -9,6 +9,7 @@ import schedule
from fastapi.staticfiles import StaticFiles
# Internal Packages
from khoj.processor.conversation.gpt import summarize
from khoj.processor.ledger.beancount_to_jsonl import BeancountToJsonl
from khoj.processor.jsonl.jsonl_to_jsonl import JsonlToJsonl
from khoj.processor.markdown.markdown_to_jsonl import MarkdownToJsonl
@@ -186,3 +187,39 @@ def configure_conversation_processor(conversation_processor_config):
    conversation_processor.chat_session = ""

    return conversation_processor

@schedule.repeat(schedule.every(15).minutes)
def save_chat_session():
    # No need to create empty log file
    if not (
        state.processor_config
        and state.processor_config.conversation
        and state.processor_config.conversation.meta_log
        and state.processor_config.conversation.chat_session
    ):
        return

    # Summarize Conversation Logs for this Session
    chat_session = state.processor_config.conversation.chat_session
    openai_api_key = state.processor_config.conversation.openai_api_key
    conversation_log = state.processor_config.conversation.meta_log
    model = state.processor_config.conversation.model
    session = {
        "summary": summarize(chat_session, summary_type="chat", model=model, api_key=openai_api_key),
        "session-start": conversation_log.get("session", [{"session-end": 0}])[-1]["session-end"],
        "session-end": len(conversation_log["chat"]),
    }
    if "session" in conversation_log:
        conversation_log["session"].append(session)
    else:
        conversation_log["session"] = [session]

    # Save Conversation Metadata Logs to Disk
    conversation_logfile = resolve_absolute_path(state.processor_config.conversation.conversation_logfile)
    conversation_logfile.parent.mkdir(parents=True, exist_ok=True)  # create conversation directory if doesn't exist
    with open(conversation_logfile, "w+", encoding="utf-8") as logfile:
        json.dump(conversation_log, logfile)

    state.processor_config.conversation.chat_session = None
    logger.info("📩 Saved current chat session to conversation logs")
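
Note that @schedule.repeat only registers the job; the schedule library fires it when the application periodically calls schedule.run_pending(), which Khoj does from its startup code outside this diff. A minimal sketch of such a poller, purely for illustration (the daemon-thread wrapper and 60-second interval are assumptions, not code from this commit):

# Illustration only: the polling loop `schedule` needs to trigger the 15-minute job.
import threading
import time

import schedule


def poll_scheduled_jobs(interval_seconds: int = 60) -> None:
    """Check for due jobs (like save_chat_session) roughly once a minute."""
    while True:
        schedule.run_pending()
        time.sleep(interval_seconds)


# Run in a daemon thread so polling does not block the web server.
threading.Thread(target=poll_scheduled_jobs, daemon=True).start()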


@@ -48,7 +48,7 @@
        document.getElementById("chat-input").value = "";

        // Generate backend API URL to execute query
        let url = `/api/beta/chat?q=${encodeURIComponent(query)}`;
        let url = `/api/chat?q=${encodeURIComponent(query)}`;

        // Call specified Khoj API
        fetch(url)
@@ -78,7 +78,7 @@
    }

    window.onload = function () {
        fetch('/api/beta/chat')
        fetch('/api/chat')
            .then(response => response.json())
            .then(data => data.response)
            .then(chat_logs => {


@@ -10,6 +10,7 @@ from fastapi import HTTPException
# Internal Packages
from khoj.configure import configure_processor, configure_search
from khoj.processor.conversation.gpt import converse, message_to_log, message_to_prompt
from khoj.search_type import image_search, text_search
from khoj.utils.helpers import timer
from khoj.utils.rawconfig import FullConfig, SearchResponse
@@ -183,3 +184,40 @@ def update(t: Optional[SearchType] = None, force: Optional[bool] = False):
    logger.info("📬 Processor reconfigured via API")

    return {"status": "ok", "message": "khoj reloaded"}

@api.get("/chat")
def chat(q: Optional[str] = None):
    # Initialize Variables
    api_key = state.processor_config.conversation.openai_api_key

    # Load Conversation History
    chat_session = state.processor_config.conversation.chat_session
    meta_log = state.processor_config.conversation.meta_log

    # If user query is empty, return chat history
    if not q:
        if meta_log.get("chat"):
            return {"status": "ok", "response": meta_log["chat"]}
        else:
            return {"status": "ok", "response": []}

    # Collate context for GPT
    result_list = search(q, n=2, r=True, score_threshold=0, dedupe=False)
    collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
    logger.debug(f"Reference Context:\n{collated_result}")

    try:
        gpt_response = converse(collated_result, q, meta_log, api_key=api_key)
        status = "ok"
    except Exception as e:
        gpt_response = str(e)
        status = "error"

    # Update Conversation History
    state.processor_config.conversation.chat_session = message_to_prompt(q, chat_session, gpt_message=gpt_response)
    state.processor_config.conversation.meta_log["chat"] = message_to_log(
        q, gpt_response, khoj_message_metadata={"context": collated_result}, conversation_log=meta_log.get("chat", [])
    )

    return {"status": status, "response": gpt_response, "context": collated_result}
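
With the route promoted out of beta, clients call /api/chat directly. A quick usage sketch against the handler above; the base URL and the requests dependency are illustrative assumptions, not part of this commit:

# Illustration only: assumes a locally running Khoj server (adjust the URL
# to your setup) and the third-party `requests` package.
import requests

KHOJ_URL = "http://localhost:42110"  # assumed address of the local Khoj server

# An empty query returns the stored chat history ("response" is a list of messages)
history = requests.get(f"{KHOJ_URL}/api/chat").json()

# A query returns the {"status", "response", "context"} payload built above
reply = requests.get(f"{KHOJ_URL}/api/chat", params={"q": "What did I work on today?"}).json()
print(reply["status"], reply["response"])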


@@ -1,24 +1,18 @@
# Standard Packages
import json
import logging
from typing import Optional

# External Packages
import schedule
from fastapi import APIRouter

# Internal Packages
from khoj.routers.api import search
from khoj.processor.conversation.gpt import (
    answer,
    converse,
    extract_search_type,
    message_to_log,
    message_to_prompt,
    summarize,
)
from khoj.utils.state import SearchType
from khoj.utils.helpers import get_from_dict, resolve_absolute_path
from khoj.utils.helpers import get_from_dict
from khoj.utils import state
@@ -68,76 +62,3 @@ def answer_beta(q: str):
        status = "error"

    return {"status": status, "response": gpt_response}

@api_beta.get("/chat")
def chat(q: Optional[str] = None):
    # Initialize Variables
    api_key = state.processor_config.conversation.openai_api_key

    # Load Conversation History
    chat_session = state.processor_config.conversation.chat_session
    meta_log = state.processor_config.conversation.meta_log

    # If user query is empty, return chat history
    if not q:
        if meta_log.get("chat"):
            return {"status": "ok", "response": meta_log["chat"]}
        else:
            return {"status": "ok", "response": []}

    # Collate context for GPT
    result_list = search(q, n=2, r=True, score_threshold=0, dedupe=False)
    collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
    logger.debug(f"Reference Context:\n{collated_result}")

    try:
        gpt_response = converse(collated_result, q, meta_log, api_key=api_key)
        status = "ok"
    except Exception as e:
        gpt_response = str(e)
        status = "error"

    # Update Conversation History
    state.processor_config.conversation.chat_session = message_to_prompt(q, chat_session, gpt_message=gpt_response)
    state.processor_config.conversation.meta_log["chat"] = message_to_log(
        q, gpt_response, khoj_message_metadata={"context": collated_result}, conversation_log=meta_log.get("chat", [])
    )

    return {"status": status, "response": gpt_response, "context": collated_result}

@schedule.repeat(schedule.every(5).minutes)
def save_chat_session():
    # No need to create empty log file
    if not (
        state.processor_config
        and state.processor_config.conversation
        and state.processor_config.conversation.meta_log
        and state.processor_config.conversation.chat_session
    ):
        return

    # Summarize Conversation Logs for this Session
    chat_session = state.processor_config.conversation.chat_session
    openai_api_key = state.processor_config.conversation.openai_api_key
    conversation_log = state.processor_config.conversation.meta_log
    model = state.processor_config.conversation.model
    session = {
        "summary": summarize(chat_session, summary_type="chat", model=model, api_key=openai_api_key),
        "session-start": conversation_log.get("session", [{"session-end": 0}])[-1]["session-end"],
        "session-end": len(conversation_log["chat"]),
    }
    if "session" in conversation_log:
        conversation_log["session"].append(session)
    else:
        conversation_log["session"] = [session]

    # Save Conversation Metadata Logs to Disk
    conversation_logfile = resolve_absolute_path(state.processor_config.conversation.conversation_logfile)
    conversation_logfile.parent.mkdir(parents=True, exist_ok=True)  # create conversation directory if doesn't exist
    with open(conversation_logfile, "w+", encoding="utf-8") as logfile:
        json.dump(conversation_log, logfile)

    state.processor_config.conversation.chat_session = None
    logger.info("📩 Saved current chat session to conversation logs")
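
For reference, the meta_log that both versions of save_chat_session persist is a plain dict serialized with json.dump. A sketch of its shape using only the keys visible above; the fields inside individual "chat" entries come from message_to_log, whose output is not shown in this diff, so they are illustrative assumptions:

# Approximate structure of the conversation log written to conversation_logfile.
conversation_log = {
    "chat": [
        # Entry fields are assumed; message_to_log builds them elsewhere.
        {"by": "you", "message": "What did I work on today?"},
        {"by": "khoj", "message": "...", "context": "# ..."},
    ],
    "session": [
        {
            "summary": "GPT-generated summary of the chat session",
            "session-start": 0,  # "session-end" of the previous session, or 0
            "session-end": 2,    # len(conversation_log["chat"]) at save time
        }
    ],
}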