Clean up commented-out code

sabaimran 2023-07-05 11:24:40 -07:00
parent 79b1b1d350
commit 67a8795b1f
3 changed files with 31 additions and 46 deletions

View file

@@ -84,7 +84,6 @@
             function readStream() {
                 reader.read().then(({ done, value }) => {
                     if (done) {
-                        console.log("Stream complete");
                         return;
                     }
@@ -99,8 +98,6 @@
                         new_response_text.innerHTML += polishedReference;
                     } else {
                         new_response_text.innerHTML += chunk;
-                        console.log(`Received ${chunk.length} bytes of data`);
-                        console.log(`Chunk: ${chunk}`);
                         document.getElementById("chat-body").scrollTop = document.getElementById("chat-body").scrollHeight;
                         readStream();
                     }
@@ -108,14 +105,6 @@
                     }
                     readStream();
                 });
-
-            // fetch(url)
-            //     .then(data => {
-            //         // Render message by Khoj to chat body
-            //         console.log(data.response);
-            //         renderMessageWithReference(data.response, "khoj", data.context);
-            //     });
-
         }

         function incrementalChat(event) {
@@ -428,31 +417,33 @@
     <script>
         var khojBannerSubmit = document.getElementById("khoj-banner-submit");

-        khojBannerSubmit.addEventListener("click", function(event) {
-            event.preventDefault();
-            var email = document.getElementById("khoj-banner-email").value;
-            fetch("https://lantern.khoj.dev/beta/users/", {
-                method: "POST",
-                body: JSON.stringify({
-                    email: email
-                }),
-                headers: {
-                    "Content-Type": "application/json"
-                }
-            }).then(function(response) {
-                return response.json();
-            }).then(function(data) {
-                console.log(data);
-                if (data.user != null) {
-                    document.getElementById("khoj-banner").innerHTML = "Thanks for signing up. We'll be in touch soon! 🚀";
-                    document.getElementById("khoj-banner-submit").remove();
-                } else {
-                    document.getElementById("khoj-banner").innerHTML = "There was an error signing up. Please contact team@khoj.dev";
-                }
-            }).catch(function(error) {
-                console.log(error);
-                document.getElementById("khoj-banner").innerHTML = "There was an error signing up. Please contact team@khoj.dev";
-            });
-        });
+        if (khojBannerSubmit != null) {
+            khojBannerSubmit.addEventListener("click", function(event) {
+                event.preventDefault();
+                var email = document.getElementById("khoj-banner-email").value;
+                fetch("https://lantern.khoj.dev/beta/users/", {
+                    method: "POST",
+                    body: JSON.stringify({
+                        email: email
+                    }),
+                    headers: {
+                        "Content-Type": "application/json"
+                    }
+                }).then(function(response) {
+                    return response.json();
+                }).then(function(data) {
+                    console.log(data);
+                    if (data.user != null) {
+                        document.getElementById("khoj-banner").innerHTML = "Thanks for signing up. We'll be in touch soon! 🚀";
+                        document.getElementById("khoj-banner-submit").remove();
+                    } else {
+                        document.getElementById("khoj-banner").innerHTML = "There was an error signing up. Please contact team@khoj.dev";
+                    }
+                }).catch(function(error) {
+                    console.log(error);
+                    document.getElementById("khoj-banner").innerHTML = "There was an error signing up. Please contact team@khoj.dev";
+                });
+            });
+        }
     </script>
 </html>
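
Note: the readStream() loop cleaned up above pulls the /chat endpoint's streamed response chunk by chunk and appends each chunk to the chat body. Below is a minimal Python sketch of the same consumption pattern; the base URL, the /api path prefix, and the use of the requests library are assumptions for a locally running Khoj server, not part of this commit.

import requests

def stream_chat(query, base_url="http://localhost:8000"):
    # /chat returns a StreamingResponse; iterate over it chunk by chunk,
    # analogous to reader.read() in the browser's readStream() loop.
    with requests.get(f"{base_url}/api/chat", params={"q": query}, stream=True) as response:
        response.raise_for_status()
        for chunk in response.iter_content(chunk_size=None, decode_unicode=True):
            print(chunk, end="", flush=True)  # hypothetical consumer; render as needed

stream_chat("What is my next task?")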

View file

@@ -2,9 +2,7 @@
 import os
 import logging
 from datetime import datetime
-from typing import Any, Optional
-from uuid import UUID
-import asyncio
+from typing import Any
 from threading import Thread
 import json
 
@@ -12,10 +10,8 @@ import json
 from langchain.chat_models import ChatOpenAI
 from langchain.llms import OpenAI
 from langchain.schema import ChatMessage
-from langchain.callbacks.base import BaseCallbackHandler
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
-from langchain.callbacks import AsyncIteratorCallbackHandler
-from langchain.callbacks.base import BaseCallbackManager, AsyncCallbackHandler
+from langchain.callbacks.base import BaseCallbackManager
 import openai
 import tiktoken
 from tenacity import (
@@ -50,6 +46,7 @@ class ThreadedGenerator:
         item = self.queue.get()
         if item is StopIteration:
             if self.completion_func:
+                # The completion func effectively acts as a callback: it adds the aggregated response to the conversation history. It's constructed in api.py.
                 self.completion_func(gpt_response=self.response)
             raise StopIteration
         return item
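
The class commented on above is a queue-backed generator: a producer thread pushes tokens into a queue while the consumer iterates, and the completion callback fires once the stream is drained. Below is a minimal runnable sketch of that pattern, assuming a simplified shape of the class; send(), close(), and the demo producer are illustrative, not the module's actual API beyond what the hunk shows.

import queue
from threading import Thread

class ThreadedGenerator:
    def __init__(self, completion_func=None):
        self.queue = queue.Queue()
        self.response = ""  # aggregated response, built up as chunks arrive
        self.completion_func = completion_func

    def __iter__(self):
        return self

    def __next__(self):
        item = self.queue.get()
        if item is StopIteration:
            if self.completion_func:
                # Fires once the stream is drained, e.g. to append the
                # full response to the conversation history.
                self.completion_func(gpt_response=self.response)
            raise StopIteration
        return item

    def send(self, data):
        self.response += data
        self.queue.put(data)

    def close(self):
        self.queue.put(StopIteration)

# Usage: a producer thread streams tokens while the consumer iterates.
def produce(g):
    for token in ["Hello", ", ", "world"]:
        g.send(token)
    g.close()

gen = ThreadedGenerator(completion_func=lambda gpt_response: print("\nsaved:", gpt_response))
Thread(target=produce, args=(gen,)).start()
for token in gen:
    print(token, end="", flush=True)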

View file

@@ -398,7 +398,6 @@ def update(
 @api.get("/chat/init")
 def chat_init(
     request: Request,
-    q: Optional[str] = None,
     client: Optional[str] = None,
     user_agent: Optional[str] = Header(None),
     referer: Optional[str] = Header(None),
@@ -429,9 +428,7 @@ def chat_init(
         )
     ]

-    # If user query is empty, return chat history
-    if not q:
-        return {"status": "ok", "response": meta_log.get("chat", [])}
+    return {"status": "ok", "response": meta_log.get("chat", [])}


 @api.get("/chat", response_class=StreamingResponse)
@@ -474,7 +471,7 @@ async def chat(
     chat_session = state.processor_config.conversation.chat_session
     meta_log = state.processor_config.conversation.meta_log

-    # If user query is empty, return chat history
+    # If user query is empty, return nothing
     if not q:
         return StreamingResponse(None)
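
With the q parameter dropped, /chat/init now unconditionally returns the chat history. A minimal sketch of calling it after this change; the host and the /api path prefix are assumptions for a local Khoj server.

import requests

data = requests.get("http://localhost:8000/api/chat/init", params={"client": "web"}).json()
if data["status"] == "ok":
    for message in data["response"]:  # prior chat turns from the conversation log
        print(message)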