Merge branch 'master' of github.com:Mintplex-Labs/anything-llm

Commit 0b1a0e0a1f by timothycarambat on 2025-03-04 09:08:21 -08:00
9 changed files with 166 additions and 106 deletions
Changed paths:
.github/workflows
frontend/src
  components/LLMSelection/AnthropicAiOptions
  hooks
  pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection
server

View file

@@ -6,7 +6,7 @@ concurrency:
 on:
   push:
-    branches: ['sharp-pdf-image-converter'] # put your current branch to create a build. Core team only.
+    branches: ['chore/anthropic-model-endpoint'] # put your current branch to create a build. Core team only.
     paths-ignore:
       - '**.md'
       - 'cloud-deployments/*'

View file

@@ -1,4 +1,12 @@
+import { useState, useEffect } from "react";
+import System from "@/models/system";
+
 export default function AnthropicAiOptions({ settings }) {
+  const [inputValue, setInputValue] = useState(settings?.AnthropicApiKey);
+  const [anthropicApiKey, setAnthropicApiKey] = useState(
+    settings?.AnthropicApiKey
+  );
+
   return (
     <div className="w-full flex flex-col">
       <div className="w-full flex items-center gap-[36px] mt-1.5">
@@ -15,45 +23,117 @@ export default function AnthropicAiOptions({ settings }) {
             required={true}
             autoComplete="off"
             spellCheck={false}
+            onChange={(e) => setInputValue(e.target.value)}
+            onBlur={() => setAnthropicApiKey(inputValue)}
           />
         </div>
         {!settings?.credentialsOnly && (
-          <div className="flex flex-col w-60">
-            <label className="text-white text-sm font-semibold block mb-3">
-              Chat Model Selection
-            </label>
-            <select
-              name="AnthropicModelPref"
-              defaultValue={settings?.AnthropicModelPref || "claude-2"}
-              required={true}
-              className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
-            >
-              {[
-                "claude-instant-1.2",
-                "claude-2.0",
-                "claude-2.1",
-                "claude-3-haiku-20240307",
-                "claude-3-sonnet-20240229",
-                "claude-3-opus-latest",
-                "claude-3-5-haiku-latest",
-                "claude-3-5-haiku-20241022",
-                "claude-3-5-sonnet-latest",
-                "claude-3-5-sonnet-20241022",
-                "claude-3-5-sonnet-20240620",
-                "claude-3-7-sonnet-20250219",
-                "claude-3-7-sonnet-latest",
-              ].map((model) => {
-                return (
-                  <option key={model} value={model}>
-                    {model}
-                  </option>
-                );
-              })}
-            </select>
-          </div>
+          <AnthropicModelSelection
+            apiKey={anthropicApiKey}
+            settings={settings}
+          />
         )}
       </div>
     </div>
   );
 }
+
+const DEFAULT_MODELS = [
+  {
+    id: "claude-3-7-sonnet-20250219",
+    name: "Claude 3.7 Sonnet",
+  },
+  {
+    id: "claude-3-5-sonnet-20241022",
+    name: "Claude 3.5 Sonnet (New)",
+  },
+  {
+    id: "claude-3-5-haiku-20241022",
+    name: "Claude 3.5 Haiku",
+  },
+  {
+    id: "claude-3-5-sonnet-20240620",
+    name: "Claude 3.5 Sonnet (Old)",
+  },
+  {
+    id: "claude-3-haiku-20240307",
+    name: "Claude 3 Haiku",
+  },
+  {
+    id: "claude-3-opus-20240229",
+    name: "Claude 3 Opus",
+  },
+  {
+    id: "claude-3-sonnet-20240229",
+    name: "Claude 3 Sonnet",
+  },
+  {
+    id: "claude-2.1",
+    name: "Claude 2.1",
+  },
+  {
+    id: "claude-2.0",
+    name: "Claude 2.0",
+  },
+];
+
+function AnthropicModelSelection({ apiKey, settings }) {
+  const [models, setModels] = useState(DEFAULT_MODELS);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      setLoading(true);
+      const { models } = await System.customModels(
+        "anthropic",
+        typeof apiKey === "boolean" ? null : apiKey
+      );
+      if (models.length > 0) setModels(models);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, [apiKey]);
+
+  if (loading) {
+    return (
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-3">
+          Chat Model Selection
+        </label>
+        <select
+          name="AnthropicModelPref"
+          disabled={true}
+          className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            -- loading available models --
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-3">
+        Chat Model Selection
+      </label>
+      <select
+        name="AnthropicModelPref"
+        required={true}
+        className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {models.map((model) => (
+          <option
+            key={model.id}
+            value={model.id}
+            selected={settings?.AnthropicModelPref === model.id}
+          >
+            {model.name}
+          </option>
+        ))}
+      </select>
+    </div>
+  );
+}
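Note: the component above now discovers Anthropic models at runtime instead of rendering a hard-coded list. As a minimal sketch of the contract it relies on (the System.customModels call and the { models: [{ id, name }] } shape come straight from this diff; the wrapper name resolveAnthropicModels is hypothetical and only for illustration):

import System from "@/models/system";

// Hypothetical wrapper mirroring what AnthropicModelSelection does in its useEffect.
async function resolveAnthropicModels(apiKey, fallback = []) {
  // System.customModels resolves to { models: [{ id, name }, ...] }; an empty array
  // means discovery failed (missing/invalid key or network error), so callers keep
  // their fallback list, which in the component above is DEFAULT_MODELS.
  const { models } = await System.customModels(
    "anthropic",
    typeof apiKey === "boolean" ? null : apiKey
  );
  return models.length > 0 ? models : fallback;
}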

View file

@@ -25,21 +25,7 @@ const PROVIDER_DEFAULT_MODELS = {
     "learnlm-1.5-pro-experimental",
     "gemini-2.0-flash-exp",
   ],
-  anthropic: [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ],
+  anthropic: [],
   azure: [],
   lmstudio: [],
   localai: [],

View file

@@ -73,7 +73,7 @@ export default function ChatModelSelection({
           </optgroup>
         )}
         {Array.isArray(customModels) && customModels.length > 0 && (
-          <optgroup label="Custom models">
+          <optgroup label="Discovered models">
            {customModels.map((model) => {
              return (
                <option

View file

@@ -19,7 +19,7 @@
     "seed": "node prisma/seed.js"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.32.1",
+    "@anthropic-ai/sdk": "^0.39.0",
     "@azure/openai": "1.0.0-beta.10",
     "@datastax/astra-db-ts": "^0.1.3",
     "@google/generative-ai": "^0.7.1",
@@ -98,4 +98,4 @@
     "prettier": "^3.0.3",
     "cross-env": "^7.0.3"
   }
 }

View file

@@ -22,7 +22,9 @@ class AnthropicLLM {
     });
     this.anthropic = anthropic;
     this.model =
-      modelPreference || process.env.ANTHROPIC_MODEL_PREF || "claude-2.0";
+      modelPreference ||
+      process.env.ANTHROPIC_MODEL_PREF ||
+      "claude-3-5-sonnet-20241022";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -31,6 +33,11 @@ class AnthropicLLM {
     this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
+    this.log(`Initialized with ${this.model}`);
+  }
+
+  log(text, ...args) {
+    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
   }
 
   streamingEnabled() {
@@ -45,23 +52,8 @@
     return MODEL_MAP.anthropic[this.model] ?? 100_000;
   }
 
-  isValidChatCompletionModel(modelName = "") {
-    const validModels = [
-      "claude-instant-1.2",
-      "claude-2.0",
-      "claude-2.1",
-      "claude-3-haiku-20240307",
-      "claude-3-sonnet-20240229",
-      "claude-3-opus-latest",
-      "claude-3-5-haiku-latest",
-      "claude-3-5-haiku-20241022",
-      "claude-3-5-sonnet-latest",
-      "claude-3-5-sonnet-20241022",
-      "claude-3-5-sonnet-20240620",
-      "claude-3-7-sonnet-20250219",
-      "claude-3-7-sonnet-latest",
-    ];
-    return validModels.includes(modelName);
-  }
+  isValidChatCompletionModel(_modelName = "") {
+    return true;
+  }
 
   /**
@@ -111,11 +103,6 @@
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
-    if (!this.isValidChatCompletionModel(this.model))
-      throw new Error(
-        `Anthropic chat: ${this.model} is not valid for chat completion!`
-      );
-
     try {
       const result = await LLMPerformanceMonitor.measureAsyncFunction(
         this.anthropic.messages.create({
@@ -146,11 +133,6 @@
   }
 
   async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
-    if (!this.isValidChatCompletionModel(this.model))
-      throw new Error(
-        `Anthropic chat: ${this.model} is not valid for chat completion!`
-      );
-
     const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(
       this.anthropic.messages.stream({
         model: this.model,

View file

@@ -12,6 +12,7 @@ const { GeminiLLM } = require("../AiProviders/gemini");
 const SUPPORT_CUSTOM_MODELS = [
   "openai",
+  "anthropic",
   "localai",
   "ollama",
   "togetherai",
@@ -40,6 +41,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
   switch (provider) {
     case "openai":
       return await openAiModels(apiKey);
+    case "anthropic":
+      return await anthropicModels(apiKey);
     case "localai":
       return await localAIModels(basePath, apiKey);
     case "ollama":
@@ -185,6 +188,36 @@ async function openAiModels(apiKey = null) {
   return { models: [...gpts, ...customModels], error: null };
 }
 
+async function anthropicModels(_apiKey = null) {
+  const apiKey =
+    _apiKey === true
+      ? process.env.ANTHROPIC_API_KEY
+      : _apiKey || process.env.ANTHROPIC_API_KEY || null;
+  const AnthropicAI = require("@anthropic-ai/sdk");
+  const anthropic = new AnthropicAI({ apiKey });
+  const models = await anthropic.models
+    .list()
+    .then((results) => results.data)
+    .then((models) => {
+      return models
+        .filter((model) => model.type === "model")
+        .map((model) => {
+          return {
+            id: model.id,
+            name: model.display_name,
+          };
+        });
+    })
+    .catch((e) => {
+      console.error(`Anthropic:listModels`, e.message);
+      return [];
+    });
+
+  // Api Key was successful so lets save it for future uses
+  if (models.length > 0 && !!apiKey) process.env.ANTHROPIC_API_KEY = apiKey;
+  return { models, error: null };
+}
+
 async function localAIModels(basePath = null, apiKey = null) {
   const { OpenAI: OpenAIApi } = require("openai");
   const openai = new OpenAIApi({
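For reference, a minimal usage sketch of the new "anthropic" branch of getCustomModels (assumptions: this module exports getCustomModels, and the require path below is illustrative rather than taken from the repo):

// Illustrative require path; adjust to wherever this customModels helper lives.
const { getCustomModels } = require("./customModels");

(async () => {
  // Passing a real key lists live models from the Anthropic API; passing true or
  // nothing falls back to process.env.ANTHROPIC_API_KEY, matching the _apiKey
  // handling in anthropicModels above.
  const { models, error } = await getCustomModels(
    "anthropic",
    process.env.ANTHROPIC_API_KEY
  );
  if (!error) console.log(models);
  // e.g. [{ id: "claude-3-7-sonnet-latest", name: "Claude 3.7 Sonnet" }, ...]
})();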

View file

@@ -52,7 +52,7 @@ const KEY_MAPPING = {
   },
   AnthropicModelPref: {
     envKey: "ANTHROPIC_MODEL_PREF",
-    checks: [isNotEmpty, validAnthropicModel],
+    checks: [isNotEmpty],
   },
   GeminiLLMApiKey: {
@@ -755,27 +755,6 @@ function validGeminiSafetySetting(input = "") {
     : `Invalid Safety setting. Must be one of ${validModes.join(", ")}.`;
 }
 
-function validAnthropicModel(input = "") {
-  const validModels = [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ];
-  return validModels.includes(input)
-    ? null
-    : `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
-}
-
 function supportedEmbeddingModel(input = "") {
   const supported = [
     "openai",

View file

@@ -24,10 +24,10 @@
     node-fetch "^2.6.7"
     web-streams-polyfill "^3.2.1"
 
-"@anthropic-ai/sdk@^0.32.1":
-  version "0.32.1"
-  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.32.1.tgz#d22c8ebae2adccc59d78fb416e89de337ff09014"
-  integrity sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==
+"@anthropic-ai/sdk@^0.39.0":
+  version "0.39.0"
+  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.39.0.tgz#624d5b33413a9cc322febb64e9d48bdcf5a98cdc"
+  integrity sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==
   dependencies:
     "@types/node" "^18.11.18"
     "@types/node-fetch" "^2.6.4"