mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-04-17 18:18:11 +00:00
[FIX] OpenAI compatible endpoints query mode developer API bug fix (#1789)
Fix query mode always responding with the refusal message on developer API OpenAI-compatible endpoints
This commit is contained in:
parent
7a78ad3960
commit
910eb36cfe
1 changed files with 4 additions and 4 deletions
@@ -112,8 +112,8 @@ async function chatSync({
   }

   // For OpenAI Compatible chats, we cannot do backfilling so we simply aggregate results here.
-  contextTexts = [...contextTexts];
-  sources = [...sources];
+  contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
+  sources = [...sources, ...vectorSearchResults.sources];

   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
@@ -328,8 +328,8 @@ async function streamChat({
   }

   // For OpenAI Compatible chats, we cannot do backfilling so we simply aggregate results here.
-  contextTexts = [...contextTexts];
-  sources = [...sources];
+  contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
+  sources = [...sources, ...vectorSearchResults.sources];

   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
|
Loading…
Add table
Reference in a new issue