Mirror of https://github.com/Mintplex-Labs/anything-llm.git, synced 2025-04-17 18:18:11 +00:00
Handle undefined stream chunk for native LLM (#534)
parent 74d2711d80
commit e9f7b9b79e
1 changed file with 5 additions and 0 deletions
@@ -268,6 +268,11 @@ function handleStreamResponses(response, stream, responseProps) {
   return new Promise(async (resolve) => {
     let fullText = "";
     for await (const chunk of stream) {
+      if (chunk === undefined)
+        throw new Error(
+          "Stream returned undefined chunk. Aborting reply - check model provider logs."
+        );
+
       const content = chunk.hasOwnProperty("content") ? chunk.content : chunk;
       fullText += content;
       writeResponseChunk(response, {