diff --git a/.github/workflows/dev-build.yaml b/.github/workflows/dev-build.yaml
index 787305337..111e38d6f 100644
--- a/.github/workflows/dev-build.yaml
+++ b/.github/workflows/dev-build.yaml
@@ -6,7 +6,7 @@ concurrency:
 
 on:
   push:
-    branches: ['sharp-pdf-image-converter'] # put your current branch to create a build. Core team only.
+    branches: ['chore/anthropic-model-endpoint'] # put your current branch to create a build. Core team only.
     paths-ignore:
       - '**.md'
       - 'cloud-deployments/*'
diff --git a/frontend/src/components/LLMSelection/AnthropicAiOptions/index.jsx b/frontend/src/components/LLMSelection/AnthropicAiOptions/index.jsx
index 026282ea4..95cda6ef5 100644
--- a/frontend/src/components/LLMSelection/AnthropicAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/AnthropicAiOptions/index.jsx
@@ -1,4 +1,12 @@
+import { useState, useEffect } from "react";
+import System from "@/models/system";
+
 export default function AnthropicAiOptions({ settings }) {
+  const [inputValue, setInputValue] = useState(settings?.AnthropicApiKey);
+  const [anthropicApiKey, setAnthropicApiKey] = useState(
+    settings?.AnthropicApiKey
+  );
+
   return (
     <div className="w-full flex flex-col">
       <div className="w-full flex items-center gap-[36px] mt-1.5">
@@ -15,45 +23,117 @@ export default function AnthropicAiOptions({ settings }) {
             required={true}
             autoComplete="off"
             spellCheck={false}
+            onChange={(e) => setInputValue(e.target.value)}
+            onBlur={() => setAnthropicApiKey(inputValue)}
           />
         </div>
         {!settings?.credentialsOnly && (
-          <div className="flex flex-col w-60">
-            <label className="text-white text-sm font-semibold block mb-3">
-              Chat Model Selection
-            </label>
-            <select
-              name="AnthropicModelPref"
-              defaultValue={settings?.AnthropicModelPref || "claude-2"}
-              required={true}
-              className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
-            >
-              {[
-                "claude-instant-1.2",
-                "claude-2.0",
-                "claude-2.1",
-                "claude-3-haiku-20240307",
-                "claude-3-sonnet-20240229",
-                "claude-3-opus-latest",
-                "claude-3-5-haiku-latest",
-                "claude-3-5-haiku-20241022",
-                "claude-3-5-sonnet-latest",
-                "claude-3-5-sonnet-20241022",
-                "claude-3-5-sonnet-20240620",
-                "claude-3-7-sonnet-20250219",
-                "claude-3-7-sonnet-latest",
-              ].map((model) => {
-                return (
-                  <option key={model} value={model}>
-                    {model}
-                  </option>
-                );
-              })}
-            </select>
-          </div>
+          <AnthropicModelSelection
+            apiKey={anthropicApiKey}
+            settings={settings}
+          />
         )}
       </div>
     </div>
   );
 }
+
+const DEFAULT_MODELS = [
+  {
+    id: "claude-3-7-sonnet-20250219",
+    name: "Claude 3.7 Sonnet",
+  },
+  {
+    id: "claude-3-5-sonnet-20241022",
+    name: "Claude 3.5 Sonnet (New)",
+  },
+  {
+    id: "claude-3-5-haiku-20241022",
+    name: "Claude 3.5 Haiku",
+  },
+  {
+    id: "claude-3-5-sonnet-20240620",
+    name: "Claude 3.5 Sonnet (Old)",
+  },
+  {
+    id: "claude-3-haiku-20240307",
+    name: "Claude 3 Haiku",
+  },
+  {
+    id: "claude-3-opus-20240229",
+    name: "Claude 3 Opus",
+  },
+  {
+    id: "claude-3-sonnet-20240229",
+    name: "Claude 3 Sonnet",
+  },
+  {
+    id: "claude-2.1",
+    name: "Claude 2.1",
+  },
+  {
+    id: "claude-2.0",
+    name: "Claude 2.0",
+  },
+];
+
+function AnthropicModelSelection({ apiKey, settings }) {
+  const [models, setModels] = useState(DEFAULT_MODELS);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      setLoading(true);
+      const { models } = await System.customModels(
+        "anthropic",
+        typeof apiKey === "boolean" ? null : apiKey
+      );
+      if (models.length > 0) setModels(models);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, [apiKey]);
+
+  if (loading) {
+    return (
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-3">
+          Chat Model Selection
+        </label>
+        <select
+          name="AnthropicModelPref"
+          disabled={true}
+          className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            -- loading available models --
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-3">
+        Chat Model Selection
+      </label>
+      <select
+        name="AnthropicModelPref"
+        required={true}
+        className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {models.map((model) => (
+          <option
+            key={model.id}
+            value={model.id}
+            selected={settings?.AnthropicModelPref === model.id}
+          >
+            {model.name}
+          </option>
+        ))}
+      </select>
+    </div>
+  );
+}
diff --git a/frontend/src/hooks/useGetProvidersModels.js b/frontend/src/hooks/useGetProvidersModels.js
index 2019a19df..0b3143490 100644
--- a/frontend/src/hooks/useGetProvidersModels.js
+++ b/frontend/src/hooks/useGetProvidersModels.js
@@ -25,21 +25,7 @@ const PROVIDER_DEFAULT_MODELS = {
     "learnlm-1.5-pro-experimental",
     "gemini-2.0-flash-exp",
   ],
-  anthropic: [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ],
+  anthropic: [],
   azure: [],
   lmstudio: [],
   localai: [],
diff --git a/frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection/index.jsx b/frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection/index.jsx
index 9c794a15e..9440cddbc 100644
--- a/frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection/index.jsx
+++ b/frontend/src/pages/WorkspaceSettings/ChatSettings/WorkspaceLLMSelection/ChatModelSelection/index.jsx
@@ -73,7 +73,7 @@ export default function ChatModelSelection({
           </optgroup>
         )}
         {Array.isArray(customModels) && customModels.length > 0 && (
-          <optgroup label="Custom models">
+          <optgroup label="Discovered models">
             {customModels.map((model) => {
               return (
                 <option
diff --git a/server/package.json b/server/package.json
index 88505273a..e34b5c4ee 100644
--- a/server/package.json
+++ b/server/package.json
@@ -19,7 +19,7 @@
     "seed": "node prisma/seed.js"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.32.1",
+    "@anthropic-ai/sdk": "^0.39.0",
     "@azure/openai": "1.0.0-beta.10",
     "@datastax/astra-db-ts": "^0.1.3",
     "@google/generative-ai": "^0.7.1",
@@ -98,4 +98,4 @@
     "prettier": "^3.0.3",
     "cross-env": "^7.0.3"
   }
-}
+}
\ No newline at end of file
diff --git a/server/utils/AiProviders/anthropic/index.js b/server/utils/AiProviders/anthropic/index.js
index f01eb973a..c4f53acf6 100644
--- a/server/utils/AiProviders/anthropic/index.js
+++ b/server/utils/AiProviders/anthropic/index.js
@@ -22,7 +22,9 @@ class AnthropicLLM {
     });
     this.anthropic = anthropic;
     this.model =
-      modelPreference || process.env.ANTHROPIC_MODEL_PREF || "claude-2.0";
+      modelPreference ||
+      process.env.ANTHROPIC_MODEL_PREF ||
"claude-3-5-sonnet-20241022"; this.limits = { history: this.promptWindowLimit() * 0.15, system: this.promptWindowLimit() * 0.15, @@ -31,6 +33,11 @@ class AnthropicLLM { this.embedder = embedder ?? new NativeEmbedder(); this.defaultTemp = 0.7; + this.log(`Initialized with ${this.model}`); + } + + log(text, ...args) { + console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args); } streamingEnabled() { @@ -45,23 +52,8 @@ class AnthropicLLM { return MODEL_MAP.anthropic[this.model] ?? 100_000; } - isValidChatCompletionModel(modelName = "") { - const validModels = [ - "claude-instant-1.2", - "claude-2.0", - "claude-2.1", - "claude-3-haiku-20240307", - "claude-3-sonnet-20240229", - "claude-3-opus-latest", - "claude-3-5-haiku-latest", - "claude-3-5-haiku-20241022", - "claude-3-5-sonnet-latest", - "claude-3-5-sonnet-20241022", - "claude-3-5-sonnet-20240620", - "claude-3-7-sonnet-20250219", - "claude-3-7-sonnet-latest", - ]; - return validModels.includes(modelName); + isValidChatCompletionModel(_modelName = "") { + return true; } /** @@ -111,11 +103,6 @@ class AnthropicLLM { } async getChatCompletion(messages = null, { temperature = 0.7 }) { - if (!this.isValidChatCompletionModel(this.model)) - throw new Error( - `Anthropic chat: ${this.model} is not valid for chat completion!` - ); - try { const result = await LLMPerformanceMonitor.measureAsyncFunction( this.anthropic.messages.create({ @@ -146,11 +133,6 @@ class AnthropicLLM { } async streamGetChatCompletion(messages = null, { temperature = 0.7 }) { - if (!this.isValidChatCompletionModel(this.model)) - throw new Error( - `Anthropic chat: ${this.model} is not valid for chat completion!` - ); - const measuredStreamRequest = await LLMPerformanceMonitor.measureStream( this.anthropic.messages.stream({ model: this.model, diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js index 68fec4651..225a77600 100644 --- a/server/utils/helpers/customModels.js +++ b/server/utils/helpers/customModels.js @@ -12,6 +12,7 @@ const { GeminiLLM } = require("../AiProviders/gemini"); const SUPPORT_CUSTOM_MODELS = [ "openai", + "anthropic", "localai", "ollama", "togetherai", @@ -40,6 +41,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) { switch (provider) { case "openai": return await openAiModels(apiKey); + case "anthropic": + return await anthropicModels(apiKey); case "localai": return await localAIModels(basePath, apiKey); case "ollama": @@ -185,6 +188,36 @@ async function openAiModels(apiKey = null) { return { models: [...gpts, ...customModels], error: null }; } +async function anthropicModels(_apiKey = null) { + const apiKey = + _apiKey === true + ? 
+      ? process.env.ANTHROPIC_API_KEY
+      : _apiKey || process.env.ANTHROPIC_API_KEY || null;
+  const AnthropicAI = require("@anthropic-ai/sdk");
+  const anthropic = new AnthropicAI({ apiKey });
+  const models = await anthropic.models
+    .list()
+    .then((results) => results.data)
+    .then((models) => {
+      return models
+        .filter((model) => model.type === "model")
+        .map((model) => {
+          return {
+            id: model.id,
+            name: model.display_name,
+          };
+        });
+    })
+    .catch((e) => {
+      console.error(`Anthropic:listModels`, e.message);
+      return [];
+    });
+
+  // Api Key was successful so lets save it for future uses
+  if (models.length > 0 && !!apiKey) process.env.ANTHROPIC_API_KEY = apiKey;
+  return { models, error: null };
+}
+
 async function localAIModels(basePath = null, apiKey = null) {
   const { OpenAI: OpenAIApi } = require("openai");
   const openai = new OpenAIApi({
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index d50118bef..8d5825f64 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -52,7 +52,7 @@ const KEY_MAPPING = {
   },
   AnthropicModelPref: {
     envKey: "ANTHROPIC_MODEL_PREF",
-    checks: [isNotEmpty, validAnthropicModel],
+    checks: [isNotEmpty],
   },
 
   GeminiLLMApiKey: {
@@ -755,27 +755,6 @@ function validGeminiSafetySetting(input = "") {
     : `Invalid Safety setting. Must be one of ${validModes.join(", ")}.`;
 }
 
-function validAnthropicModel(input = "") {
-  const validModels = [
-    "claude-instant-1.2",
-    "claude-2.0",
-    "claude-2.1",
-    "claude-3-haiku-20240307",
-    "claude-3-sonnet-20240229",
-    "claude-3-opus-latest",
-    "claude-3-5-haiku-latest",
-    "claude-3-5-haiku-20241022",
-    "claude-3-5-sonnet-latest",
-    "claude-3-5-sonnet-20241022",
-    "claude-3-5-sonnet-20240620",
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-  ];
-  return validModels.includes(input)
-    ? null
-    : `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
-}
-
 function supportedEmbeddingModel(input = "") {
   const supported = [
     "openai",
diff --git a/server/yarn.lock b/server/yarn.lock
index 406f00576..e32dd02b0 100644
--- a/server/yarn.lock
+++ b/server/yarn.lock
@@ -24,10 +24,10 @@
     node-fetch "^2.6.7"
     web-streams-polyfill "^3.2.1"
 
-"@anthropic-ai/sdk@^0.32.1":
-  version "0.32.1"
-  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.32.1.tgz#d22c8ebae2adccc59d78fb416e89de337ff09014"
-  integrity sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==
+"@anthropic-ai/sdk@^0.39.0":
+  version "0.39.0"
+  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.39.0.tgz#624d5b33413a9cc322febb64e9d48bdcf5a98cdc"
+  integrity sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==
   dependencies:
     "@types/node" "^18.11.18"
     "@types/node-fetch" "^2.6.4"
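
Note (not part of the patch): the model discovery added in server/utils/helpers/customModels.js reduces to a single models-listing call against the Anthropic SDK. The standalone sketch below exercises that same call so the mapping can be checked locally; it assumes @anthropic-ai/sdk@^0.39.0 is installed and ANTHROPIC_API_KEY is exported, and the file name and console output are illustrative only.

// list-anthropic-models.js — hypothetical scratch script, mirrors anthropicModels() in the diff
const AnthropicAI = require("@anthropic-ai/sdk");

async function listAnthropicModels() {
  // Same client construction the diff uses; the key is assumed to come from the environment.
  const anthropic = new AnthropicAI({ apiKey: process.env.ANTHROPIC_API_KEY });
  const results = await anthropic.models.list();
  // results.data entries expose id, display_name, and type, per the mapping in the diff.
  return results.data
    .filter((model) => model.type === "model")
    .map((model) => ({ id: model.id, name: model.display_name }));
}

listAnthropicModels()
  .then((models) => console.log(models))
  .catch((e) => console.error("Anthropic:listModels", e.message));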