Strip the incoming query from the slash conversation command (#500)

* Strip the incoming query from the slash conversation command before passing it to the model or for search
* Return q when content index not loaded
* Remove -n 4 from pytest ini configuration to isolate test failures
This commit is contained in:
sabaimran 2023-10-13 21:11:23 -07:00 committed by GitHub
parent 96c0b21285
commit 09bb3686cc
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 11 additions and 7 deletions

View file

@@ -112,7 +112,7 @@ warn_unused_ignores = false
 line-length = 120

 [tool.pytest.ini_options]
-addopts = "--strict-markers -n 4"
+addopts = "--strict-markers"
 markers = [
     "chatquality: Evaluate chatbot capabilities and quality",
 ]

View file

@@ -702,10 +702,16 @@ async def chat(
 ) -> Response:
     perform_chat_checks()
     conversation_command = get_conversation_command(query=q, any_references=True)
+    q = q.replace(f"/{conversation_command.value}", "").strip()
+
     compiled_references, inferred_queries, defiltered_query = await extract_references_and_questions(
         request, q, (n or 5), conversation_command
     )
+    conversation_command = get_conversation_command(query=q, any_references=not is_none_or_empty(compiled_references))
+    if conversation_command == ConversationCommand.Default and is_none_or_empty(compiled_references):
+        conversation_command = ConversationCommand.General
+
     if conversation_command == ConversationCommand.Help:
         model_type = "offline" if state.processor_config.conversation.enable_offline_chat else "openai"
         formatted_help = help_message.format(model=model_type, version=state.khoj_version)
@@ -768,18 +774,16 @@ async def extract_references_and_questions(
         logger.warning(
             "No content index loaded, so cannot extract references from knowledge base. Please configure your data sources and update the index to chat with your notes."
         )
-        return compiled_references, inferred_queries
+        return compiled_references, inferred_queries, q

     if conversation_type == ConversationCommand.General:
         return compiled_references, inferred_queries, q

     # Extract filter terms from user message
     defiltered_query = q
-    filter_terms = []
     for filter in [DateFilter(), WordFilter(), FileFilter()]:
-        filter_terms += filter.get_filter_terms(q)
-        defiltered_query = filter.defilter(q)
-    filters_in_query = " ".join(filter_terms)
+        defiltered_query = filter.defilter(defiltered_query)
+    filters_in_query = q.replace(defiltered_query, "").strip()

     # Infer search queries from user message
     with timer("Extracting search queries took", logger):