Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2025-04-17 18:18:11 +00:00)
Commit cba66150d7 (parent fef5169ca4)
4 changed files with 73 additions and 40 deletions

Changed paths: frontend/src/components/EmbeddingSelection/LocalAiOptions, server
@@ -6,51 +6,80 @@ export default function LocalAiOptions({ settings }) {
     settings?.EmbeddingBasePath
   );
   const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
 
   return (
     <>
-      <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          LocalAI Base URL
-        </label>
-        <input
-          type="url"
-          name="EmbeddingBasePath"
-          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-          placeholder="http://localhost:8080/v1"
-          defaultValue={settings?.EmbeddingBasePath}
-          onChange={(e) => setBasePathValue(e.target.value)}
-          onBlur={updateBasePath}
-          required={true}
-          autoComplete="off"
-          spellCheck={false}
-        />
-      </div>
-      <LocalAIModelSelection settings={settings} basePath={basePath} />
-      <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Max embedding chunk length
-        </label>
-        <input
-          type="number"
-          name="EmbeddingModelMaxChunkLength"
-          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-          placeholder="1000"
-          min={1}
-          onScroll={(e) => e.target.blur()}
-          defaultValue={settings?.EmbeddingModelMaxChunkLength}
-          required={false}
-          autoComplete="off"
-        />
-      </div>
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            LocalAI Base URL
+          </label>
+          <input
+            type="url"
+            name="EmbeddingBasePath"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="http://localhost:8080/v1"
+            defaultValue={settings?.EmbeddingBasePath}
+            onChange={(e) => setBasePathValue(e.target.value)}
+            onBlur={() => setBasePath(basePathValue)}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+          />
+        </div>
+        <LocalAIModelSelection
+          settings={settings}
+          apiKey={apiKey}
+          basePath={basePath}
+        />
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            Max embedding chunk length
+          </label>
+          <input
+            type="number"
+            name="EmbeddingModelMaxChunkLength"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="1000"
+            min={1}
+            onScroll={(e) => e.target.blur()}
+            defaultValue={settings?.EmbeddingModelMaxChunkLength}
+            required={false}
+            autoComplete="off"
+          />
+        </div>
+      </div>
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <div className="flex flex-col gap-y-1 mb-4">
+            <label className="text-white text-sm font-semibold block">
+              Local AI API Key
+            </label>
+            <p className="text-xs italic text-white/60">
+              optional API key to use if running LocalAI with API keys.
+            </p>
+          </div>
+
+          <input
+            type="password"
+            name="LocalAiApiKey"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="sk-mysecretkey"
+            defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+            autoComplete="off"
+            spellCheck={false}
+            onChange={(e) => setApiKeyValue(e.target.value)}
+            onBlur={() => setApiKey(apiKeyValue)}
+          />
+        </div>
+      </div>
     </>
   );
 }
 
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
 
@@ -62,12 +91,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);
 
   if (loading || customModels.length == 0) {
     return (
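The two frontend hunks above follow a commit-on-blur pattern: each field keeps a draft value that changes on every keystroke (basePathValue, apiKeyValue) and a committed value that only changes when the input loses focus (basePath, apiKey), and the model list is refetched only when a committed value changes. Below is a minimal standalone sketch of that pattern, not the project's component; fetchModels is a hypothetical stand-in for System.customModels, and the model shape ({ id }) is assumed.

import { useEffect, useState } from "react";

// Hypothetical stand-in for System.customModels("localai", apiKey, basePath).
async function fetchModels(apiKey, basePath) {
  return []; // pretend this hits `${basePath}/models` with an optional bearer key
}

export default function ExampleOptions({ settings }) {
  // Draft values update on every keystroke; committed values update only on blur.
  const [basePathValue, setBasePathValue] = useState(settings?.EmbeddingBasePath);
  const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
  const [models, setModels] = useState([]);

  useEffect(() => {
    // Runs only when a committed value changes, i.e. after the field loses focus,
    // so the LocalAI models endpoint is not hit on every keystroke.
    if (!basePath) return;
    fetchModels(apiKey, basePath).then(setModels);
  }, [basePath, apiKey]);

  return (
    <>
      <input
        type="url"
        defaultValue={settings?.EmbeddingBasePath}
        onChange={(e) => setBasePathValue(e.target.value)}
        onBlur={() => setBasePath(basePathValue)}
      />
      <input
        type="password"
        onChange={(e) => setApiKeyValue(e.target.value)}
        onBlur={() => setApiKey(apiKeyValue)}
      />
      <select>
        {models.map((model) => (
          <option key={model.id}>{model.id}</option>
        ))}
      </select>
    </>
  );
}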
@@ -29,6 +29,7 @@ const SystemSettings = {
       EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
       EmbeddingModelMaxChunkLength:
         process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
+      LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
       ...(vectorDB === "pinecone"
         ? {
             PineConeEnvironment: process.env.PINECONE_ENVIRONMENT,
@@ -98,13 +99,11 @@ const SystemSettings = {
             AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
           }
         : {}),
 
       ...(llmProvider === "localai"
         ? {
             LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
             LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
             LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
-            LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
 
             // For embedding credentials when localai is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
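In both SystemSettings hunks the key is surfaced as !!process.env.LOCAL_AI_API_KEY, a boolean. The apparent intent (consistent with the frontend hunk above) is that the settings endpoint reports only whether a key is configured and never the key itself, and the form renders a masked placeholder when one exists. A tiny illustrative sketch, with the assignment shown only for demonstration:

// Assume a key was set in the environment.
process.env.LOCAL_AI_API_KEY = "sk-mysecretkey";

const LocalAiApiKey = !!process.env.LOCAL_AI_API_KEY; // true; a boolean, not the key

// Mirrors the input's defaultValue in the frontend hunk: a masked placeholder.
const maskedDefault = LocalAiApiKey ? "*".repeat(20) : ""; // "********************"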
@@ -9,6 +9,11 @@ class LocalAiEmbedder {
       throw new Error("No embedding model was set.");
     const config = new Configuration({
       basePath: process.env.EMBEDDING_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
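The embedder hunk uses a conditional spread so the apiKey property is present only when LOCAL_AI_API_KEY is set; otherwise the Configuration carries no credential at all. A minimal sketch of the same construction, assuming the v3 "openai" npm package (Configuration / OpenAIApi) that this class already uses:

const { Configuration, OpenAIApi } = require("openai");

function buildLocalAiClient() {
  const config = new Configuration({
    basePath: process.env.EMBEDDING_BASE_PATH, // e.g. http://localhost:8080/v1
    // Spread in apiKey only when LOCAL_AI_API_KEY is set; when it is unset the
    // property is absent entirely and requests go out without credentials.
    ...(!!process.env.LOCAL_AI_API_KEY
      ? { apiKey: process.env.LOCAL_AI_API_KEY }
      : {}),
  });
  return new OpenAIApi(config);
}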
@@ -8,7 +8,7 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
     case "openai":
       return await openAiModels(apiKey);
     case "localai":
-      return await localAIModels(basePath);
+      return await localAIModels(basePath, apiKey);
     case "native-llm":
       return nativeLLMModels();
     default:
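The last hunk only changes the call site; localAIModels itself is not part of this diff. A plausible sketch of what accepting the extra apiKey parameter might look like, again assuming the v3 openai client and hedging on the exact return shape (the frontend only destructures { models }):

const { Configuration, OpenAIApi } = require("openai");

async function localAIModels(basePath = null, apiKey = null) {
  const config = new Configuration({
    basePath, // the LocalAI /v1 endpoint entered in the UI
    ...(apiKey ? { apiKey } : {}), // attach credentials only when a key was given
  });
  const openai = new OpenAIApi(config);

  // List models from the LocalAI instance; fall back to an empty list on failure.
  const models = await openai
    .listModels()
    .then((res) => res.data.data)
    .catch(() => []);
  return { models };
}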