patch: API key to localai service calls ()

Authored by Timothy Carambat on 2023-12-11 14:18:28 -08:00, committed by GitHub
parent fef5169ca4
commit cba66150d7
4 changed files with 73 additions and 40 deletions
frontend/src/components/EmbeddingSelection/LocalAiOptions
server/models
server/utils/EmbeddingEngines/localAi
server/utils/helpers

frontend/src/components/EmbeddingSelection/LocalAiOptions

@@ -6,51 +6,80 @@ export default function LocalAiOptions({ settings }) {
settings?.EmbeddingBasePath
);
const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
- function updateBasePath() {
- setBasePath(basePathValue);
- }
+ const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+ const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
return (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={updateBasePath}
required={true}
autoComplete="off"
spellCheck={false}
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<LocalAIModelSelection
settings={settings}
apiKey={apiKey}
basePath={basePath}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="1000"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
- <LocalAIModelSelection settings={settings} basePath={basePath} />
- <div className="flex flex-col w-60">
- <label className="text-white text-sm font-semibold block mb-4">
- Max embedding chunk length
- </label>
- <input
- type="number"
- name="EmbeddingModelMaxChunkLength"
- className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
- placeholder="1000"
- min={1}
- onScroll={(e) => e.target.blur()}
- defaultValue={settings?.EmbeddingModelMaxChunkLength}
- required={false}
- autoComplete="off"
- />
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold block">
Local AI API Key
</label>
<p className="text-xs italic text-white/60">
optional API key to use if running LocalAI with API keys.
</p>
</div>
<input
type="password"
name="LocalAiApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</>
);
}
- function LocalAIModelSelection({ settings, basePath = null }) {
+ function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);
@@ -62,12 +91,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
return;
}
setLoading(true);
const { models } = await System.customModels("localai", null, basePath);
const { models } = await System.customModels("localai", apiKey, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
- }, [basePath]);
+ }, [basePath, apiKey]);
if (loading || customModels.length == 0) {
return (
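Two details of the frontend change above are worth spelling out. The key is held in two pieces of state: apiKeyValue follows every keystroke, while apiKey is only committed on blur, so the model refetch keyed on [basePath, apiKey] runs once per edit rather than on every character. And because the server only ever reports whether a key exists (see the SystemSettings change below), a stored key is rendered as a 20-character mask instead of being echoed back. A minimal standalone sketch of that pattern, assuming React and an illustrative onCommit callback (LocalAiApiKeyField is not a component in this diff):

import { useState } from "react";

// Sketch only: the draft follows keystrokes, the committed value is sent on blur,
// and a stored key is shown as a mask because its real value never reaches the client.
function LocalAiApiKeyField({ settings, onCommit }) {
  const [draft, setDraft] = useState(settings?.LocalAiApiKey);
  return (
    <input
      type="password"
      name="LocalAiApiKey"
      defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
      onChange={(e) => setDraft(e.target.value)}
      onBlur={() => onCommit(draft)}
    />
  );
}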

server/models

@@ -29,6 +29,7 @@ const SystemSettings = {
EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
EmbeddingModelMaxChunkLength:
process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
+ LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
...(vectorDB === "pinecone"
? {
PineConeEnvironment: process.env.PINECONE_ENVIRONMENT,
@@ -98,13 +99,11 @@ const SystemSettings = {
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
}
: {}),
...(llmProvider === "localai"
? {
LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
- LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
// For embedding credentials when localai is selected.
OpenAiKey: !!process.env.OPEN_AI_KEY,
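The SystemSettings change has two parts: the LocalAiApiKey presence flag moves out of the llmProvider === "localai" branch and is now reported unconditionally, so the embedding settings screen can read it even when LocalAI is not the selected LLM provider, and the double negation ensures only a boolean ever leaves the server. A rough sketch of the fields the frontend relies on here, with illustrative values (the real payload carries many more keys):

// Illustrative shape only; values mirror the placeholders used in the frontend form.
const settings = {
  EmbeddingBasePath: "http://localhost:8080/v1",
  EmbeddingModelMaxChunkLength: "1000",
  LocalAiApiKey: true, // !!process.env.LOCAL_AI_API_KEY, i.e. presence only, never the key itself
};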

server/utils/EmbeddingEngines/localAi

@@ -9,6 +9,11 @@ class LocalAiEmbedder {
throw new Error("No embedding model was set.");
const config = new Configuration({
basePath: process.env.EMBEDDING_BASE_PATH,
+ ...(!!process.env.LOCAL_AI_API_KEY
+ ? {
+ apiKey: process.env.LOCAL_AI_API_KEY,
+ }
+ : {}),
});
this.openai = new OpenAIApi(config);
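The conditional spread above means the client configuration only gains an apiKey field when LOCAL_AI_API_KEY is set, so keyless LocalAI instances keep working unchanged. A self-contained sketch of the resulting constructor region, assuming the v3-style openai client that the Configuration/OpenAIApi usage in this file implies:

const { Configuration, OpenAIApi } = require("openai");

// Attach credentials only when a key is configured; otherwise the config is
// identical to what the embedder built before this patch.
const config = new Configuration({
  basePath: process.env.EMBEDDING_BASE_PATH, // e.g. http://localhost:8080/v1
  ...(!!process.env.LOCAL_AI_API_KEY
    ? { apiKey: process.env.LOCAL_AI_API_KEY }
    : {}),
});
const openai = new OpenAIApi(config);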

server/utils/helpers

@@ -8,7 +8,7 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
case "openai":
return await openAiModels(apiKey);
case "localai":
- return await localAIModels(basePath);
+ return await localAIModels(basePath, apiKey);
case "native-llm":
return nativeLLMModels();
default:
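Finally, the helper forwards the key when listing models. The body of localAIModels is not part of this diff; the sketch below is hypothetical, inferred only from the call sites above and the { models } shape the frontend destructures, again using the v3 openai client:

const { Configuration, OpenAIApi } = require("openai");

// Hypothetical sketch, not the committed implementation: the optional apiKey is
// attached to the OpenAI-compatible client used to list models from LocalAI.
async function localAIModels(basePath = null, apiKey = null) {
  const config = new Configuration({
    basePath,
    ...(apiKey ? { apiKey } : {}),
  });
  const openai = new OpenAIApi(config);
  const models = await openai
    .listModels()
    .then((res) => res.data.data)
    .catch(() => []);
  return { models, error: null };
}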