Fix bug in rendering chat references in LLM response

This commit is contained in:
sabaimran 2023-11-07 16:44:41 -08:00
parent 0e1cdb6536
commit 98cf095b65
2 changed files with 2 additions and 2 deletions

View file

@ -192,9 +192,9 @@
.then(response => {
const reader = response.body.getReader();
const decoder = new TextDecoder();
let references = null;
function readStream() {
let references = null;
reader.read().then(({ done, value }) => {
if (done) {
// Evaluate the contents of new_response_text.innerHTML after all the data has been streamed

View file

@ -187,9 +187,9 @@
.then(response => {
const reader = response.body.getReader();
const decoder = new TextDecoder();
let references = null;
function readStream() {
let references = null;
reader.read().then(({ done, value }) => {
if (done) {
// Evaluate the contents of new_response_text.innerHTML after all the data has been streamed