mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-05-02 17:07:13 +00:00
fix: set lower maxChunk limit on native embedder to stay within resource constraints
chore: update comment for what embedding chunk means
This commit is contained in:
parent
b40cfead88
commit
a7f6003277
4 changed files with 5 additions and 5 deletions
|
@ -14,7 +14,7 @@ class AzureOpenAiEmbedder {
|
|||
);
|
||||
this.openai = openai;
|
||||
|
||||
// The maximum amount of "inputs" that OpenAI API can process in a single call.
|
||||
// Limit of how many strings we can process in a single pass to stay within resource or network limits
|
||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/faq#i-am-trying-to-use-embeddings-and-received-the-error--invalidrequesterror--too-many-inputs--the-max-number-of-inputs-is-1---how-do-i-fix-this-:~:text=consisting%20of%20up%20to%2016%20inputs%20per%20API%20request
|
||||
this.embeddingMaxChunkLength = 16;
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@ class LocalAiEmbedder {
|
|||
});
|
||||
this.openai = new OpenAIApi(config);
|
||||
|
||||
// Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
|
||||
// Limit of how many strings we can process in a single pass to stay within resource or network limits
|
||||
this.embeddingMaxChunkLength = maximumChunkLength();
|
||||
}
|
||||
|
||||
|
|
|
@ -13,8 +13,8 @@ class NativeEmbedder {
|
|||
);
|
||||
this.modelPath = path.resolve(this.cacheDir, "Xenova", "all-MiniLM-L6-v2");
|
||||
|
||||
// Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
|
||||
this.embeddingMaxChunkLength = 1_000;
|
||||
// Limit of how many strings we can process in a single pass to stay within resource or network limits
|
||||
this.embeddingMaxChunkLength = 50;
|
||||
|
||||
// Make directory when it does not exist in existing installations
|
||||
if (!fs.existsSync(this.cacheDir)) fs.mkdirSync(this.cacheDir);
|
||||
|
|
|
@ -10,7 +10,7 @@ class OpenAiEmbedder {
|
|||
const openai = new OpenAIApi(config);
|
||||
this.openai = openai;
|
||||
|
||||
// Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
|
||||
// Limit of how many strings we can process in a single pass to stay within resource or network limits
|
||||
this.embeddingMaxChunkLength = 1_000;
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue