patch: implement @lunamidori hotfix for LocalAI streaming chunk overflows ()

* patch: implement @lunamidori hotfix for LocalAI streaming chunk overflows
resolves 

* change log to error log

* log trace

* lint
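
The failure mode, roughly: when streaming chunks never assemble into valid JSON, the accumulator string keeps growing until a concatenation exceeds the JavaScript engine's maximum string length and throws. Below is a minimal standalone sketch of the guard this hotfix adds, under stated assumptions: Node.js/V8 semantics, and `appendChunk` is a hypothetical helper name for illustration, not a function from the repository.

// Minimal sketch of the guarded append (assumptions: Node.js/V8, where
// growing a string past the engine's maximum length throws a RangeError;
// `appendChunk` is a hypothetical helper, not from the repo).
function appendChunk(chunk, message) {
  try {
    // Normal path: keep accumulating the partial JSON payload.
    return chunk + message;
  } catch (e) {
    // Runaway chunk: the concatenation failed (e.g. "RangeError: Invalid
    // string length"), so drop the buffer and let the stream continue.
    console.error(`Chunk appending error`, e);
    return "";
  }
}

In the patched handler the same try/catch wraps `chunk += message` in place, so a single oversized accumulation resets `chunk` to an empty string instead of crashing the whole streaming response.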
Timothy Carambat 2023-12-12 16:20:06 -08:00 committed by GitHub
parent d4f4d85492
commit 37cdb845a4

@@ -253,7 +253,16 @@ function handleStreamResponses(response, stream, responseProps) {
         } catch {}
         if (!validJSON) {
-          chunk += message;
+          // It can be possible that the chunk decoding is running away
+          // and the message chunk fails to append due to string length.
+          // In this case abort the chunk and reset so we can continue.
+          // ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
+          try {
+            chunk += message;
+          } catch (e) {
+            console.error(`Chunk appending error`, e);
+            chunk = "";
+          }
           continue;
         } else {
           chunk = "";