mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-04-17 18:18:11 +00:00
patch: implement @lunamidori hotfix for LocalAI streaming chunk overflows (#433)
* patch: implement @lunamidori hotfix for LocalAI streaming chunk overflows — resolves #416
* change log to error log
* log trace
* lint
This commit is contained in:
parent
d4f4d85492
commit
37cdb845a4
1 changed file with 10 additions and 1 deletion
|
@@ -253,7 +253,16 @@ function handleStreamResponses(response, stream, responseProps) {
         } catch {}

         if (!validJSON) {
-          chunk += message;
+          // It can be possible that the chunk decoding is running away
+          // and the message chunk fails to append due to string length.
+          // In this case abort the chunk and reset so we can continue.
+          // ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
+          try {
+            chunk += message;
+          } catch (e) {
+            console.error(`Chunk appending error`, e);
+            chunk = "";
+          }
           continue;
         } else {
           chunk = "";
|
Loading…
Add table
Reference in a new issue