Render references after chat response is streamed for smoother render

Otherwise Khoj's chat response fills in between the streamed message
and the already rendered references section at the bottom of the
message

Define OnlineContext type to simplify typing online context param
across other interfaces and functions
Debanjum Singh Solanky 2024-08-02 02:33:04 +05:30
parent a733e5c1d4
commit 02b46a1784
5 changed files with 60 additions and 59 deletions
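
In short, the commit adds a shared OnlineContext type and defers attaching references to the message until the end_llm_response chunk arrives. A condensed TypeScript sketch of that pattern follows; OnlineContextData, Context and trackReferences here are simplified stand-ins, not the actual code, and the real definitions are in the diffs below.

// Illustrative sketch only; see the diffs below for the real implementation.
interface OnlineContextData { [key: string]: any }   // stand-in for the existing type
interface Context { compiled: string; file: string } // stand-in for the existing type

// New shared type, replacing repeated inline `{ [key: string]: OnlineContextData }` annotations.
interface OnlineContext { [key: string]: OnlineContextData }

interface StreamMessage {
    rawResponse: string;
    context: Context[];
    onlineContext: OnlineContext;
    completed: boolean;
}

// References received mid-stream are only tracked here; they are attached to the message
// once streaming ends, so the growing response text no longer fills in above an already
// rendered references section.
function trackReferences(
    chunk: { type: string; data: any },
    message: StreamMessage,
    context: Context[],
    onlineContext: OnlineContext,
): { context: Context[]; onlineContext: OnlineContext } {
    if (chunk.type === "references") {
        if (chunk.data.context) context = chunk.data.context;
        if (chunk.data.onlineContext) onlineContext = chunk.data.onlineContext;
    } else if (chunk.type === "end_llm_response") {
        message.context = context;
        message.onlineContext = onlineContext;
        message.completed = true;
    }
    return { context, onlineContext };
}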

View file

@@ -9,11 +9,11 @@ import NavMenu from '../components/navMenu/navMenu';
import { useSearchParams } from 'next/navigation'
import Loading from '../components/loading/loading';
import { convertMessageChunkToJson, handleImageResponse, processMessageChunk, RawReferenceData } from '../common/chatFunctions';
import { processMessageChunk } from '../common/chatFunctions';
import 'katex/dist/katex.min.css';
import { StreamMessage } from '../components/chatMessage/chatMessage';
import { Context, OnlineContext, StreamMessage } from '../components/chatMessage/chatMessage';
import { useIPLocationData, welcomeConsole } from '../common/utils';
import ChatInputArea, { ChatOptions } from '../components/chatInputArea/chatInputArea';
import { useAuthenticatedData } from '../common/auth';
@@ -110,7 +110,6 @@ export default function Chat() {
const [uploadedFiles, setUploadedFiles] = useState<string[]>([]);
const [isMobileWidth, setIsMobileWidth] = useState(false);
const locationData = useIPLocationData();
const authenticatedData = useAuthenticatedData();
useEffect(() => {
@@ -169,8 +168,11 @@ export default function Chat() {
const eventDelimiter = '␃🔚␗';
let buffer = "";
while (true) {
// Track context used for chat response
let context: Context[] = [];
let onlineContext: OnlineContext = {};
while (true) {
const { done, value } = await reader.read();
if (done) {
setQueryToProcess('');
@@ -179,7 +181,6 @@
}
const chunk = decoder.decode(value, { stream: true });
buffer += chunk;
let newEventIndex;
@@ -194,7 +195,8 @@
return;
}
processMessageChunk(event, currentMessage);
// Track context used for chat response. References are rendered at the end of the chat
({context, onlineContext} = processMessageChunk(event, currentMessage, context, onlineContext));
setMessages([...messages]);
}
@@ -222,10 +224,7 @@
setConversationID(newConversationId);
};
if (isLoading) {
return <Loading />;
}
if (isLoading) return <Loading />;
return (
<div className={styles.main + " " + styles.chatLayout}>

View file

@@ -1,17 +1,13 @@
import { Context, OnlineContextData, StreamMessage } from "../components/chatMessage/chatMessage";
import { Context, OnlineContext, StreamMessage } from "../components/chatMessage/chatMessage";
export interface RawReferenceData {
context?: Context[];
onlineContext?: {
[key: string]: OnlineContextData
}
onlineContext?: OnlineContext;
}
export interface ResponseWithReferences {
context?: Context[];
online?: {
[key: string]: OnlineContextData
}
online?: OnlineContext;
response?: string;
}
@@ -50,57 +46,65 @@ export function convertMessageChunkToJson(chunk: string): MessageChunk {
}
}
function handleJsonResponse(chunkData: any) {
const jsonData = chunkData as any;
if (jsonData.image || jsonData.detail) {
let responseWithReference = handleImageResponse(chunkData, true);
if (responseWithReference.response) return responseWithReference.response;
} else if (jsonData.response) {
return jsonData.response;
} else {
throw new Error("Invalid JSON response");
}
}
export function processMessageChunk(
rawChunk: string,
currentMessage: StreamMessage,
context: Context[] = [],
onlineContext: OnlineContext = {}): { context: Context[], onlineContext: OnlineContext } {
export function processMessageChunk(rawChunk: string, currentMessage: StreamMessage) {
const chunk = convertMessageChunkToJson(rawChunk);
if (!currentMessage) {
return;
}
if (!chunk || !chunk.type) {
return;
}
if (!currentMessage || !chunk || !chunk.type) return {context, onlineContext};
if (chunk.type === "status") {
console.log(`status: ${chunk.data}`);
const statusMessage = chunk.data as string;
currentMessage.trainOfThought.push(statusMessage);
} else if (chunk.type === "references") {
const references = chunk.data as RawReferenceData;
if (references.context) {
currentMessage.context = references.context;
}
if (references.onlineContext) {
currentMessage.onlineContext = references.onlineContext;
}
if (references.context) context = references.context;
if (references.onlineContext) onlineContext = references.onlineContext;
return {context, onlineContext}
} else if (chunk.type === "message") {
const chunkData = chunk.data;
if (chunkData !== null && typeof chunkData === 'object') {
currentMessage.rawResponse += handleJsonResponse(chunkData);
} else if (typeof chunkData === 'string' && chunkData.trim()?.startsWith("{") && chunkData.trim()?.endsWith("}")) {
try {
const jsonData = chunkData as any;
if (jsonData.image || jsonData.detail) {
let responseWithReference = handleImageResponse(chunk.data, true);
if (responseWithReference.response) currentMessage.rawResponse = responseWithReference.response;
if (responseWithReference.online) currentMessage.onlineContext = responseWithReference.online;
if (responseWithReference.context) currentMessage.context = responseWithReference.context;
} else if (jsonData.response) {
currentMessage.rawResponse = jsonData.response;
}
else {
console.debug("any message", chunk);
}
const jsonData = JSON.parse(chunkData.trim());
currentMessage.rawResponse += handleJsonResponse(jsonData);
} catch (e) {
currentMessage.rawResponse += chunkData;
currentMessage.rawResponse += JSON.stringify(chunkData);
}
} else {
currentMessage.rawResponse += chunkData;
}
} else if (chunk.type === "start_llm_response") {
console.log(`Started streaming: ${new Date()}`);
} else if (chunk.type === "end_llm_response") {
console.log(`Completed streaming: ${new Date()}`);
// Append any references after all the data has been streamed
if (onlineContext) currentMessage.onlineContext = onlineContext;
if (context) currentMessage.context = context;
// Mark current message streaming as completed
currentMessage.completed = true;
}
return {context, onlineContext};
}
export function handleImageResponse(imageJson: any, liveStream: boolean): ResponseWithReferences {

View file

@@ -33,6 +33,10 @@ export interface Context {
file: string;
}
export interface OnlineContext {
[key: string]: OnlineContextData;
}
export interface WebPage {
link: string;
query: string;
@@ -85,11 +89,9 @@ export interface SingleChatMessage {
automationId: string;
by: string;
message: string;
context: Context[];
created: string;
onlineContext: {
[key: string]: OnlineContextData
}
context: Context[];
onlineContext: OnlineContext;
rawQuery?: string;
intent?: Intent;
agent?: AgentData;
@@ -99,9 +101,7 @@ export interface StreamMessage {
rawResponse: string;
trainOfThought: string[];
context: Context[];
onlineContext: {
[key: string]: OnlineContextData
}
onlineContext: OnlineContext;
completed: boolean;
rawQuery: string;
timestamp: string;

View file

@@ -11,7 +11,7 @@ const md = new markdownIt({
typographer: true
});
import { Context, WebPage, OnlineContextData } from "../chatMessage/chatMessage";
import { Context, WebPage, OnlineContext } from "../chatMessage/chatMessage";
import { Card } from "@/components/ui/card";
import {
@@ -161,7 +161,7 @@ function GenericOnlineReferenceCard(props: OnlineReferenceCardProps) {
)
}
export function constructAllReferences(contextData: Context[], onlineData: { [key: string]: OnlineContextData }) {
export function constructAllReferences(contextData: Context[], onlineData: OnlineContext) {
const onlineReferences: OnlineReferenceData[] = [];
const contextReferences: NotesContextReferenceData[] = [];

View file

@@ -4,7 +4,7 @@ import styles from './factChecker.module.css';
import { useAuthenticatedData } from '@/app/common/auth';
import { useState, useEffect } from 'react';
import ChatMessage, { Context, OnlineContextData, WebPage } from '../components/chatMessage/chatMessage';
import ChatMessage, { Context, OnlineContext, OnlineContextData, WebPage } from '../components/chatMessage/chatMessage';
import { ModelPicker, Model } from '../components/modelPicker/modelPicker';
import ShareLink from '../components/shareLink/shareLink';
@@ -47,9 +47,7 @@ interface SupplementReferences {
interface ResponseWithReferences {
context?: Context[];
online?: {
[key: string]: OnlineContextData
}
online?: OnlineContext;
response?: string;
}