diff --git a/src/khoj/interface/web/assets/org.min.js b/src/khoj/interface/web/assets/org.min.js
index 0a870c9c..a919b280 100644
--- a/src/khoj/interface/web/assets/org.min.js
+++ b/src/khoj/interface/web/assets/org.min.js
@@ -614,8 +614,10 @@ var Org = (function () {
       var notBlankNextToken = this.lexer.peekNextToken();
       if (blankToken && !notBlankNextToken.isListElement())
         this.lexer.pushToken(blankToken); // Recover blank token only when next line is not listElement.
-      if (notBlankNextToken.indentation <= rootIndentation)
-        break; // end of the list
+      // End of the list if hit less indented line or end of directive
+      if (notBlankNextToken.indentation <= rootIndentation ||
+          (notBlankNextToken.type === Lexer.tokens.directive && notBlankNextToken.endDirective))
+        break;
 
       var element = this.parseElement(); // recursive
       if (element)
diff --git a/src/khoj/interface/web/index.html b/src/khoj/interface/web/index.html
index 7a0f896c..49929b45 100644
--- a/src/khoj/interface/web/index.html
+++ b/src/khoj/interface/web/index.html
@@ -414,6 +414,10 @@
             border: 1px solid rgb(229, 229, 229);
         }
 
+        img {
+            max-width: 90%;
+        }
+
diff --git a/src/khoj/processor/github/github_to_jsonl.py b/src/khoj/processor/github/github_to_jsonl.py
index 70ea7bf2..6f21749d 100644
--- a/src/khoj/processor/github/github_to_jsonl.py
+++ b/src/khoj/processor/github/github_to_jsonl.py
@@ -152,7 +152,10 @@ class GithubToJsonl(TextToJsonl):
         content = ""
         for chunk in response.iter_content(chunk_size=2048):
             if chunk:
-                content += chunk.decode("utf-8")
+                try:
+                    content += chunk.decode("utf-8")
+                except Exception as e:
+                    logger.error(f"Unable to decode chunk from {file_url}")
 
         return content
diff --git a/src/khoj/routers/api.py b/src/khoj/routers/api.py
index 7d6be97d..58ba713d 100644
--- a/src/khoj/routers/api.py
+++ b/src/khoj/routers/api.py
@@ -393,7 +393,7 @@ def update(
 
 
 @api.get("/chat")
-def chat(
+async def chat(
     request: Request,
     q: Optional[str] = None,
     client: Optional[str] = None,
@@ -436,7 +436,9 @@ def chat(
     with timer("Searching knowledge base took", logger):
         result_list = []
         for query in inferred_queries:
-            result_list.extend(search(query, n=5, r=True, score_threshold=-5.0, dedupe=False))
+            result_list.extend(
+                await search(query, request=request, n=5, r=True, score_threshold=-5.0, dedupe=False)
+            )
         compiled_references = [item.additional["compiled"] for item in result_list]
 
     # Switch to general conversation type if no relevant notes found for the given query