Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2025-03-13 13:42:23 +00:00)
Commit 0b1a0e0a1f: 9 changed files with 166 additions and 106 deletions
Changed paths:
  .github/workflows
  frontend/src/components/LLMSelection/AnthropicAiOptions
  frontend/src/hooks
  frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection
  server

.github/workflows/dev-build.yaml (vendored, 2 changes)
@@ -6,7 +6,7 @@ concurrency:
 on:
   push:
-    branches: ['sharp-pdf-image-converter'] # put your current branch to create a build. Core team only.
+    branches: ['chore/anthropic-model-endpoint'] # put your current branch to create a build. Core team only.
     paths-ignore:
       - '**.md'
       - 'cloud-deployments/*'
frontend/src/components/LLMSelection/AnthropicAiOptions

@@ -1,4 +1,12 @@
+import { useState, useEffect } from "react";
+import System from "@/models/system";
+
 export default function AnthropicAiOptions({ settings }) {
+  const [inputValue, setInputValue] = useState(settings?.AnthropicApiKey);
+  const [anthropicApiKey, setAnthropicApiKey] = useState(
+    settings?.AnthropicApiKey
+  );
+
   return (
     <div className="w-full flex flex-col">
       <div className="w-full flex items-center gap-[36px] mt-1.5">
@@ -15,45 +23,117 @@ export default function AnthropicAiOptions({ settings }) {
             required={true}
             autoComplete="off"
             spellCheck={false}
+            onChange={(e) => setInputValue(e.target.value)}
+            onBlur={() => setAnthropicApiKey(inputValue)}
           />
         </div>
 
         {!settings?.credentialsOnly && (
+          <AnthropicModelSelection
+            apiKey={anthropicApiKey}
+            settings={settings}
+          />
+        )}
+      </div>
+    </div>
+  );
+}
+
+const DEFAULT_MODELS = [
+  {
+    id: "claude-3-7-sonnet-20250219",
+    name: "Claude 3.7 Sonnet",
+  },
+  {
+    id: "claude-3-5-sonnet-20241022",
+    name: "Claude 3.5 Sonnet (New)",
+  },
+  {
+    id: "claude-3-5-haiku-20241022",
+    name: "Claude 3.5 Haiku",
+  },
+  {
+    id: "claude-3-5-sonnet-20240620",
+    name: "Claude 3.5 Sonnet (Old)",
+  },
+  {
+    id: "claude-3-haiku-20240307",
+    name: "Claude 3 Haiku",
+  },
+  {
+    id: "claude-3-opus-20240229",
+    name: "Claude 3 Opus",
+  },
+  {
+    id: "claude-3-sonnet-20240229",
+    name: "Claude 3 Sonnet",
+  },
+  {
+    id: "claude-2.1",
+    name: "Claude 2.1",
+  },
+  {
+    id: "claude-2.0",
+    name: "Claude 2.0",
+  },
+];
+
+function AnthropicModelSelection({ apiKey, settings }) {
+  const [models, setModels] = useState(DEFAULT_MODELS);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      setLoading(true);
+      const { models } = await System.customModels(
+        "anthropic",
+        typeof apiKey === "boolean" ? null : apiKey
+      );
+      if (models.length > 0) setModels(models);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, [apiKey]);
+
+  if (loading) {
+    return (
       <div className="flex flex-col w-60">
         <label className="text-white text-sm font-semibold block mb-3">
           Chat Model Selection
         </label>
         <select
           name="AnthropicModelPref"
-          defaultValue={settings?.AnthropicModelPref || "claude-2"}
-          required={true}
+          disabled={true}
           className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
         >
-          {[
-            "claude-instant-1.2",
-            "claude-2.0",
-            "claude-2.1",
-            "claude-3-haiku-20240307",
-            "claude-3-sonnet-20240229",
-            "claude-3-opus-latest",
-            "claude-3-5-haiku-latest",
-            "claude-3-5-haiku-20241022",
-            "claude-3-5-sonnet-latest",
-            "claude-3-5-sonnet-20241022",
-            "claude-3-5-sonnet-20240620",
-            "claude-3-7-sonnet-20250219",
-            "claude-3-7-sonnet-latest",
-          ].map((model) => {
-            return (
-              <option key={model} value={model}>
-                {model}
+          <option disabled={true} selected={true}>
+            -- loading available models --
           </option>
-            );
-          })}
         </select>
       </div>
-        )}
-      </div>
-    </div>
     );
   }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-3">
+        Chat Model Selection
+      </label>
+      <select
+        name="AnthropicModelPref"
+        required={true}
+        className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {models.map((model) => (
+          <option
+            key={model.id}
+            value={model.id}
+            selected={settings?.AnthropicModelPref === model.id}
+          >
+            {model.name}
+          </option>
+        ))}
+      </select>
+    </div>
+  );
+}
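The dropdown above is populated through System.customModels. A minimal sketch of that lookup, mirroring the call in findCustomModels() and the { models } shape returned by the server helper added later in this commit; note that a key already saved server-side reaches the component as the boolean true, which is why null is passed in that case so the backend falls back to its stored ANTHROPIC_API_KEY. The wrapper function name here is illustrative only.

// Hedged sketch of the lookup performed by findCustomModels() above; assumes the
// same imports as the component (System, DEFAULT_MODELS).
async function fetchAnthropicModels(settings) {
  const key = settings?.AnthropicApiKey; // boolean `true` when the key is already saved server-side
  const { models } = await System.customModels(
    "anthropic",
    typeof key === "boolean" ? null : key
  );
  // models: [{ id: "claude-3-7-sonnet-20250219", name: "Claude 3.7 Sonnet" }, ...]
  // An empty result leaves DEFAULT_MODELS in place as the fallback list.
  return models.length > 0 ? models : DEFAULT_MODELS;
}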
frontend/src/hooks

@@ -25,21 +25,7 @@ const PROVIDER_DEFAULT_MODELS = {
     "learnlm-1.5-pro-experimental",
     "gemini-2.0-flash-exp",
   ],
-  anthropic: [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ],
+  anthropic: [],
   azure: [],
   lmstudio: [],
   localai: [],
frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection

@@ -73,7 +73,7 @@ export default function ChatModelSelection({
             </optgroup>
           )}
           {Array.isArray(customModels) && customModels.length > 0 && (
-            <optgroup label="Custom models">
+            <optgroup label="Discovered models">
               {customModels.map((model) => {
                 return (
                   <option
server/package.json

@@ -19,7 +19,7 @@
     "seed": "node prisma/seed.js"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.32.1",
+    "@anthropic-ai/sdk": "^0.39.0",
     "@azure/openai": "1.0.0-beta.10",
     "@datastax/astra-db-ts": "^0.1.3",
     "@google/generative-ai": "^0.7.1",
server/utils/AiProviders/anthropic/index.js

@@ -22,7 +22,9 @@ class AnthropicLLM {
     });
     this.anthropic = anthropic;
     this.model =
-      modelPreference || process.env.ANTHROPIC_MODEL_PREF || "claude-2.0";
+      modelPreference ||
+      process.env.ANTHROPIC_MODEL_PREF ||
+      "claude-3-5-sonnet-20241022";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,

@@ -31,6 +33,11 @@ class AnthropicLLM {
 
     this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
+    this.log(`Initialized with ${this.model}`);
   }
 
+  log(text, ...args) {
+    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+  }
+
   streamingEnabled() {

@@ -45,23 +52,8 @@ class AnthropicLLM {
     return MODEL_MAP.anthropic[this.model] ?? 100_000;
   }
 
-  isValidChatCompletionModel(modelName = "") {
-    const validModels = [
-      "claude-instant-1.2",
-      "claude-2.0",
-      "claude-2.1",
-      "claude-3-haiku-20240307",
-      "claude-3-sonnet-20240229",
-      "claude-3-opus-latest",
-      "claude-3-5-haiku-latest",
-      "claude-3-5-haiku-20241022",
-      "claude-3-5-sonnet-latest",
-      "claude-3-5-sonnet-20241022",
-      "claude-3-5-sonnet-20240620",
-      "claude-3-7-sonnet-20250219",
-      "claude-3-7-sonnet-latest",
-    ];
-    return validModels.includes(modelName);
+  isValidChatCompletionModel(_modelName = "") {
+    return true;
   }
 
   /**

@@ -111,11 +103,6 @@ class AnthropicLLM {
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
-    if (!this.isValidChatCompletionModel(this.model))
-      throw new Error(
-        `Anthropic chat: ${this.model} is not valid for chat completion!`
-      );
-
     try {
       const result = await LLMPerformanceMonitor.measureAsyncFunction(
         this.anthropic.messages.create({

@@ -146,11 +133,6 @@ class AnthropicLLM {
   }
 
   async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
-    if (!this.isValidChatCompletionModel(this.model))
-      throw new Error(
-        `Anthropic chat: ${this.model} is not valid for chat completion!`
-      );
-
     const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(
       this.anthropic.messages.stream({
         model: this.model,
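With the hardcoded allow-list removed, the provider accepts whatever model id it is given and lets Anthropic's API reject unknown ones. A small usage sketch under stated assumptions: the require path and the no-argument constructor (embedder and modelPreference both optional) are inferred from the code above, not spelled out in this diff, and the key value is a placeholder.

// Hypothetical usage sketch; path and constructor defaults are assumptions.
const { AnthropicLLM } = require("./utils/AiProviders/anthropic");

process.env.ANTHROPIC_API_KEY = "sk-ant-..."; // placeholder key
process.env.ANTHROPIC_MODEL_PREF = "claude-3-7-sonnet-latest"; // any id Anthropic serves

const provider = new AnthropicLLM(); // resolves ANTHROPIC_MODEL_PREF, else claude-3-5-sonnet-20241022
// isValidChatCompletionModel() now always returns true, so newly released ids
// reach anthropic.messages.create() without further changes in this file.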
server/utils/helpers/customModels.js

@@ -12,6 +12,7 @@ const { GeminiLLM } = require("../AiProviders/gemini");
 
 const SUPPORT_CUSTOM_MODELS = [
   "openai",
+  "anthropic",
   "localai",
   "ollama",
   "togetherai",

@@ -40,6 +41,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
   switch (provider) {
     case "openai":
       return await openAiModels(apiKey);
+    case "anthropic":
+      return await anthropicModels(apiKey);
     case "localai":
       return await localAIModels(basePath, apiKey);
     case "ollama":

@@ -185,6 +188,36 @@ async function openAiModels(apiKey = null) {
   return { models: [...gpts, ...customModels], error: null };
 }
 
+async function anthropicModels(_apiKey = null) {
+  const apiKey =
+    _apiKey === true
+      ? process.env.ANTHROPIC_API_KEY
+      : _apiKey || process.env.ANTHROPIC_API_KEY || null;
+  const AnthropicAI = require("@anthropic-ai/sdk");
+  const anthropic = new AnthropicAI({ apiKey });
+  const models = await anthropic.models
+    .list()
+    .then((results) => results.data)
+    .then((models) => {
+      return models
+        .filter((model) => model.type === "model")
+        .map((model) => {
+          return {
+            id: model.id,
+            name: model.display_name,
+          };
+        });
+    })
+    .catch((e) => {
+      console.error(`Anthropic:listModels`, e.message);
+      return [];
+    });
+
+  // Api Key was successful so lets save it for future uses
+  if (models.length > 0 && !!apiKey) process.env.ANTHROPIC_API_KEY = apiKey;
+  return { models, error: null };
+}
+
 async function localAIModels(basePath = null, apiKey = null) {
   const { OpenAI: OpenAIApi } = require("openai");
   const openai = new OpenAIApi({
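One caveat about the helper above: anthropic.models.list() is paginated, so results.data only holds the first page. A standalone, hedged sketch of the same listing using the SDK's async iteration, assuming auto-pagination is exposed for the models endpoint in @anthropic-ai/sdk ^0.39.0 and that ANTHROPIC_API_KEY is set in the environment:

// Standalone sketch, not part of this commit.
const AnthropicAI = require("@anthropic-ai/sdk");

async function listAllAnthropicModels() {
  const anthropic = new AnthropicAI({ apiKey: process.env.ANTHROPIC_API_KEY });
  const models = [];
  // Iterate across pages rather than reading only the first page via `.data`.
  for await (const model of anthropic.models.list()) {
    if (model.type !== "model") continue;
    models.push({ id: model.id, name: model.display_name });
  }
  return models;
}

listAllAnthropicModels()
  .then((models) => console.log(models))
  .catch((e) => console.error("Anthropic:listModels", e.message));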
server/utils/helpers/updateENV.js

@@ -52,7 +52,7 @@ const KEY_MAPPING = {
   },
   AnthropicModelPref: {
     envKey: "ANTHROPIC_MODEL_PREF",
-    checks: [isNotEmpty, validAnthropicModel],
+    checks: [isNotEmpty],
   },
 
   GeminiLLMApiKey: {

@@ -755,27 +755,6 @@ function validGeminiSafetySetting(input = "") {
     : `Invalid Safety setting. Must be one of ${validModes.join(", ")}.`;
 }
 
-function validAnthropicModel(input = "") {
-  const validModels = [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ];
-  return validModels.includes(input)
-    ? null
-    : `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
-}
-
 function supportedEmbeddingModel(input = "") {
   const supported = [
     "openai",
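In practice the relaxed mapping means any non-empty AnthropicModelPref is accepted at settings-save time, and an unknown id only fails later when Anthropic rejects the request. A tiny sketch of the check semantics this relies on: checks in this file return null for a valid value and an error string otherwise (as validGeminiSafetySetting above shows); the exact isNotEmpty wording below is an assumption.

// Illustrative only; mirrors the null-or-error-string contract used by KEY_MAPPING checks.
const isNotEmpty = (input = "") =>
  String(input).length > 0 ? null : "Value cannot be empty";

console.log(isNotEmpty("claude-3-7-sonnet-latest")); // null, accepted
console.log(isNotEmpty("claude-made-up-model"));     // null, accepted here but rejected by the API later
console.log(isNotEmpty(""));                         // "Value cannot be empty"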
server/yarn.lock

@@ -24,10 +24,10 @@
     node-fetch "^2.6.7"
     web-streams-polyfill "^3.2.1"
 
-"@anthropic-ai/sdk@^0.32.1":
-  version "0.32.1"
-  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.32.1.tgz#d22c8ebae2adccc59d78fb416e89de337ff09014"
-  integrity sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==
+"@anthropic-ai/sdk@^0.39.0":
+  version "0.39.0"
+  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.39.0.tgz#624d5b33413a9cc322febb64e9d48bdcf5a98cdc"
+  integrity sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==
   dependencies:
     "@types/node" "^18.11.18"
     "@types/node-fetch" "^2.6.4"