diff --git a/src/processor/conversation/gpt.py b/src/processor/conversation/gpt.py
index d8d75cd9..c67739d1 100644
--- a/src/processor/conversation/gpt.py
+++ b/src/processor/conversation/gpt.py
@@ -210,20 +210,31 @@ def message_to_prompt(user_message, conversation_history="", gpt_message=None, s
     return f"{conversation_history}{restart_sequence} {user_message}{start_sequence}{gpt_message}"
 
 
-def message_to_log(user_message, user_message_metadata, gpt_message, conversation_log=[]):
+def message_to_log(user_message, gpt_message, user_message_metadata={}, conversation_log=[]):
     """Create json logs from messages, metadata for conversation log"""
+    default_user_message_metadata = {
+        "intent": {
+            "type": "remember",
+            "memory-type": "notes",
+            "query": user_message
+        },
+        "trigger-emotion": "calm"
+    }
+    current_dt = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
     # Create json log from Human's message
-    human_log = user_message_metadata
+    human_log = user_message_metadata or default_user_message_metadata
     human_log["message"] = user_message
-    human_log["by"] = "Human"
-    human_log["created"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+    human_log["by"] = "you"
+    human_log["created"] = current_dt
 
     # Create json log from GPT's response
-    ai_log = {"message": gpt_message, "by": "AI", "created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
+    khoj_log = {"message": gpt_message, "by": "khoj", "created": current_dt}
 
-    conversation_log.extend([human_log, ai_log])
+    conversation_log.extend([human_log, khoj_log])
     return conversation_log
 
+
 def extract_summaries(metadata):
     """Extract summaries from metadata"""
     return ''.join(
diff --git a/src/routers/api_beta.py b/src/routers/api_beta.py
index e3578586..8170999a 100644
--- a/src/routers/api_beta.py
+++ b/src/routers/api_beta.py
@@ -100,7 +100,7 @@ def chat(q: Optional[str]=None):
 
     # Update Conversation History
     state.processor_config.conversation.chat_session = message_to_prompt(q, chat_session, gpt_message=gpt_response)
-    state.processor_config.conversation.meta_log['chat'] = message_to_log(q, metadata, gpt_response, meta_log.get('chat', []))
+    state.processor_config.conversation.meta_log['chat'] = message_to_log(q, gpt_response, metadata, meta_log.get('chat', []))
 
     return {'status': status, 'response': gpt_response}
 
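
Not part of the diff above: a minimal caller sketch of the reordered message_to_log signature, assuming the module is importable as src.processor.conversation.gpt in your checkout; the message text and metadata values below are hypothetical illustrations only.

# Usage sketch for the updated signature: user_message and gpt_message are now
# the first two positional arguments; metadata and the running log follow.
# Import path and example values are assumptions, not taken from the diff.
from src.processor.conversation.gpt import message_to_log

chat_log = []
metadata = {
    "intent": {"type": "remember", "memory-type": "notes", "query": "when is my flight?"},
    "trigger-emotion": "curious",
}

chat_log = message_to_log(
    "when is my flight?",                       # user_message
    "Your notes mention a flight on Friday.",   # gpt_message
    user_message_metadata=metadata,             # falls back to the new default when empty
    conversation_log=chat_log,
)

Passing conversation_log explicitly, as the api_beta.py caller does with meta_log.get('chat', []), also sidesteps Python's shared mutable default argument: conversation_log=[] is evaluated once at definition time, so relying on the default would accumulate messages across unrelated calls.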