mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-03-24 04:01:09 +00:00
* Remove LangchainJS for chat support chaining Implement runtime LLM selection Implement AzureOpenAI Support for LLM + Embedding WIP on frontend Update env to reflect the new fields * Remove LangchainJS for chat support chaining Implement runtime LLM selection Implement AzureOpenAI Support for LLM + Embedding WIP on frontend Update env to reflect the new fields * Replace keys with LLM Selection in settings modal Enforce checks for new ENVs depending on LLM selection
42 lines
1.2 KiB
JavaScript
42 lines
1.2 KiB
JavaScript
/**
 * Resolves the vector database provider class selected via the
 * VECTOR_DB environment variable (defaults to "pinecone").
 * Providers are require()'d lazily so only the selected provider's
 * dependencies are loaded.
 * @returns {object} The selected vector DB provider class.
 * @throws {Error} When VECTOR_DB holds an unsupported value.
 */
function getVectorDbClass() {
  const vectorSelection = process.env.VECTOR_DB || "pinecone";
  switch (vectorSelection) {
    // Each case gets its own block scope: `const` declarations in an
    // unbraced case leak into the shared switch scope (no-case-declarations).
    case "pinecone": {
      const { Pinecone } = require("../vectorDbProviders/pinecone");
      return Pinecone;
    }
    case "chroma": {
      const { Chroma } = require("../vectorDbProviders/chroma");
      return Chroma;
    }
    case "lancedb": {
      const { LanceDb } = require("../vectorDbProviders/lance");
      return LanceDb;
    }
    default:
      throw new Error("ENV: No VECTOR_DB value found in environment!");
  }
}
|
|
|
|
/**
 * Instantiates the chat/completion LLM provider selected via the
 * LLM_PROVIDER environment variable (defaults to "openai").
 * Providers are require()'d lazily so only the selected provider's
 * dependencies are loaded.
 * @returns {object} A new instance of the selected LLM provider.
 * @throws {Error} When LLM_PROVIDER holds an unsupported value.
 */
function getLLMProvider() {
  // Renamed from `vectorSelection` (copy-paste from getVectorDbClass):
  // this value selects the LLM provider, not a vector DB.
  const llmSelection = process.env.LLM_PROVIDER || "openai";
  switch (llmSelection) {
    // Braced cases keep each `const` scoped to its own branch
    // (no-case-declarations).
    case "openai": {
      const { OpenAi } = require("../AiProviders/openAi");
      return new OpenAi();
    }
    case "azure": {
      const { AzureOpenAi } = require("../AiProviders/azureOpenAi");
      return new AzureOpenAi();
    }
    default:
      throw new Error("ENV: No LLM_PROVIDER value found in environment!");
  }
}
|
|
|
|
/**
 * Partitions an array into consecutive chunks of at most `size`
 * elements. The final chunk may be shorter when the array length is
 * not a multiple of `size`. The input array is not mutated.
 * @param {Array} arr - Array to partition.
 * @param {number} size - Maximum length of each chunk.
 * @returns {Array<Array>} Array of chunk arrays (empty for empty input).
 */
function toChunks(arr, size) {
  const chunks = [];
  for (let start = 0; start < arr.length; start += size) {
    chunks.push(arr.slice(start, start + size));
  }
  return chunks;
}
|
|
|
|
// Public helpers exposed by this utility module.
module.exports = { getVectorDbClass, getLLMProvider, toChunks };
|