anything-llm/server/utils/helpers/index.js
Timothy Carambat 1f29cec918
Multiple LLM Support framework + AzureOpenAI Support ()
* Remove LangchainJS for chat support chaining
Implement runtime LLM selection
Implement AzureOpenAI Support for LLM + Embedding
WIP on frontend
Update env to reflect the new fields

* Remove LangchainJS for chat support chaining
Implement runtime LLM selection
Implement AzureOpenAI Support for LLM + Embedding
WIP on frontend
Update env to reflect the new fields

* Replace keys with LLM Selection in settings modal
Enforce checks for new ENVs depending on LLM selection
2023-08-04 14:56:27 -07:00

42 lines
1.2 KiB
JavaScript

/**
 * Resolve the configured vector-database provider class.
 * Reads VECTOR_DB from the environment, defaulting to "pinecone",
 * and lazily requires only the selected provider module.
 * @returns {object} The provider class (Pinecone, Chroma, or LanceDb).
 * @throws {Error} When VECTOR_DB is set to an unsupported value.
 */
function getVectorDbClass() {
  const vectorSelection = process.env.VECTOR_DB || "pinecone";
  switch (vectorSelection) {
    // Each case is braced so its `const` is scoped to that clause
    // (avoids the no-case-declarations hazard of sharing one switch scope).
    case "pinecone": {
      const { Pinecone } = require("../vectorDbProviders/pinecone");
      return Pinecone;
    }
    case "chroma": {
      const { Chroma } = require("../vectorDbProviders/chroma");
      return Chroma;
    }
    case "lancedb": {
      const { LanceDb } = require("../vectorDbProviders/lance");
      return LanceDb;
    }
    default:
      // Reaching here means a value WAS present but unrecognized —
      // the fallback above guarantees the "missing value" case never lands here.
      throw new Error(
        `ENV: Unsupported VECTOR_DB value "${vectorSelection}"!`
      );
  }
}
/**
 * Resolve the configured LLM provider and return a ready instance.
 * Reads LLM_PROVIDER from the environment, defaulting to "openai",
 * and lazily requires only the selected provider module.
 * @returns {object} A new provider instance (OpenAi or AzureOpenAi).
 * @throws {Error} When LLM_PROVIDER is set to an unsupported value.
 */
function getLLMProvider() {
  // Renamed from the copy-pasted `vectorSelection` — this selects an LLM.
  const llmSelection = process.env.LLM_PROVIDER || "openai";
  switch (llmSelection) {
    // Braced cases keep each `const` scoped to its own clause.
    case "openai": {
      const { OpenAi } = require("../AiProviders/openAi");
      return new OpenAi();
    }
    case "azure": {
      const { AzureOpenAi } = require("../AiProviders/azureOpenAi");
      return new AzureOpenAi();
    }
    default:
      // Reaching here means a value WAS present but unrecognized —
      // the "openai" fallback above covers the missing-value case.
      throw new Error(
        `ENV: Unsupported LLM_PROVIDER value "${llmSelection}"!`
      );
  }
}
/**
 * Split an array into consecutive chunks of at most `size` elements.
 * @param {Array} arr - Source array (not mutated).
 * @param {number} size - Maximum length of each chunk.
 * @returns {Array<Array>} Chunks in original order; the last may be shorter.
 */
function toChunks(arr, size) {
  const chunks = [];
  for (let start = 0; start < arr.length; start += size) {
    chunks.push(arr.slice(start, start + size));
  }
  return chunks;
}
// Public helper surface shared across the server utilities.
module.exports = {
  getVectorDbClass,
  getLLMProvider,
  toChunks,
};