Add default conversation command to auto switch b/w general, notes modes

This was the default behavior, but it regressed when slash
commands were added in PR #463
This commit is contained in:
Debanjum Singh Solanky 2023-08-27 14:30:03 -07:00
parent b45e1d8c0d
commit 74605f6159
7 changed files with 17 additions and 14 deletions

View file

@ -287,7 +287,7 @@
</div> </div>
<!--Add Text Box To Enter Query, Trigger Incremental Search OnChange --> <!--Add Text Box To Enter Query, Trigger Incremental Search OnChange -->
<input type="text" id="query" class="option" onkeyup=incrementalSearch(event) autofocus="autofocus" placeholder="Search directly from your knowledge base"> <input type="text" id="query" class="option" onkeyup=incrementalSearch(event) autofocus="autofocus" placeholder="Search your knowledge base using natural language">
<div id="options"> <div id="options">
<!--Add Dropdown to Select Query Type --> <!--Add Dropdown to Select Query Type -->

View file

@ -119,7 +119,7 @@ def converse_offline(
model: str = "llama-2-7b-chat.ggmlv3.q4_K_S.bin", model: str = "llama-2-7b-chat.ggmlv3.q4_K_S.bin",
loaded_model: Union[GPT4All, None] = None, loaded_model: Union[GPT4All, None] = None,
completion_func=None, completion_func=None,
conversation_command=ConversationCommand.Notes, conversation_command=ConversationCommand.Default,
) -> Union[ThreadedGenerator, Iterator[str]]: ) -> Union[ThreadedGenerator, Iterator[str]]:
""" """
Converse with user using Llama Converse with user using Llama

View file

@ -109,7 +109,7 @@ def converse(
api_key: Optional[str] = None, api_key: Optional[str] = None,
temperature: float = 0.2, temperature: float = 0.2,
completion_func=None, completion_func=None,
conversation_command=ConversationCommand.Notes, conversation_command=ConversationCommand.Default,
): ):
""" """
Converse with user using OpenAI's ChatGPT Converse with user using OpenAI's ChatGPT

View file

@ -236,9 +236,10 @@ Q:"""
# -- # --
help_message = PromptTemplate.from_template( help_message = PromptTemplate.from_template(
""" """
**/notes**: Chat using the information in your knowledge base.
**/general**: Chat using just Khoj's general knowledge. This will not search against your notes.
**/default**: Chat using your knowledge base and Khoj's general knowledge for context.
**/help**: Show this help message. **/help**: Show this help message.
**/notes**: Chat using the information in your knowledge base. This is the default method.
**/general**: Chat using general knowledge with the LLM. This will not search against your notes.
You are using the **{model}** model. You are using the **{model}** model.
**version**: {version} **version**: {version}

View file

@ -705,7 +705,7 @@ async def chat(
compiled_references, inferred_queries = await extract_references_and_questions( compiled_references, inferred_queries = await extract_references_and_questions(
request, q, (n or 5), conversation_command request, q, (n or 5), conversation_command
) )
conversation_command = get_conversation_command(query=q, any_references=is_none_or_empty(compiled_references)) conversation_command = get_conversation_command(query=q, any_references=not is_none_or_empty(compiled_references))
if conversation_command == ConversationCommand.Help: if conversation_command == ConversationCommand.Help:
model_type = "offline" if state.processor_config.conversation.enable_offline_chat else "openai" model_type = "offline" if state.processor_config.conversation.enable_offline_chat else "openai"
formatted_help = help_message.format(model=model_type, version=state.khoj_version) formatted_help = help_message.format(model=model_type, version=state.khoj_version)
@ -755,7 +755,7 @@ async def extract_references_and_questions(
request: Request, request: Request,
q: str, q: str,
n: int, n: int,
conversation_type: ConversationCommand = ConversationCommand.Notes, conversation_type: ConversationCommand = ConversationCommand.Default,
): ):
# Load Conversation History # Load Conversation History
meta_log = state.processor_config.conversation.meta_log meta_log = state.processor_config.conversation.meta_log

View file

@ -60,15 +60,15 @@ def update_telemetry_state(
def get_conversation_command(query: str, any_references: bool = False) -> ConversationCommand: def get_conversation_command(query: str, any_references: bool = False) -> ConversationCommand:
if query.startswith("/notes"): if query.startswith("/notes"):
return ConversationCommand.Notes return ConversationCommand.Notes
elif query.startswith("/general"):
return ConversationCommand.General
elif query.startswith("/help"): elif query.startswith("/help"):
return ConversationCommand.Help return ConversationCommand.Help
elif query.startswith("/general"):
return ConversationCommand.General
# If no relevant notes found for the given query # If no relevant notes found for the given query
elif not any_references: elif not any_references:
return ConversationCommand.General return ConversationCommand.General
else: else:
return ConversationCommand.Notes return ConversationCommand.Default
def generate_chat_response( def generate_chat_response(
@ -76,7 +76,7 @@ def generate_chat_response(
meta_log: dict, meta_log: dict,
compiled_references: List[str] = [], compiled_references: List[str] = [],
inferred_queries: List[str] = [], inferred_queries: List[str] = [],
conversation_command: ConversationCommand = ConversationCommand.Notes, conversation_command: ConversationCommand = ConversationCommand.Default,
) -> Union[ThreadedGenerator, Iterator[str]]: ) -> Union[ThreadedGenerator, Iterator[str]]:
def _save_to_conversation_log( def _save_to_conversation_log(
q: str, q: str,

View file

@ -214,13 +214,15 @@ def log_telemetry(
class ConversationCommand(str, Enum): class ConversationCommand(str, Enum):
Default = "default"
General = "general" General = "general"
Notes = "notes" Notes = "notes"
Help = "help" Help = "help"
command_descriptions = { command_descriptions = {
ConversationCommand.General: "This command allows you to search talk with the LLM without including context from your knowledge base.", ConversationCommand.General: "Only talk about information that relies on Khoj's general knowledge, not your personal knowledge base.",
ConversationCommand.Notes: "This command allows you to search talk with the LLM while including context from your knowledge base.", ConversationCommand.Notes: "Only talk about information that is available in your knowledge base.",
ConversationCommand.Help: "This command displays a help message with all available commands and other metadata.", ConversationCommand.Default: "The default command when no command specified. It intelligently auto-switches between general and notes mode.",
ConversationCommand.Help: "Display a help message with all available commands and other metadata.",
} }