mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-04-17 18:18:11 +00:00
Add reasoning flag for azure models with "default" fallback (#3128)
This commit is contained in:
parent
805b4f79d8
commit
e76baacec4
4 changed files with 35 additions and 12 deletions
frontend/src/components/LLMSelection/AzureAiOptions
server
|
@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
           </option>
         </select>
       </div>
+
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-3">
+          Model Type
+        </label>
+        <select
+          name="AzureOpenAiModelType"
+          defaultValue={settings?.AzureOpenAiModelType || "default"}
+          className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+          required={true}
+        >
+          <option value="default">Default</option>
+          <option value="reasoning">Reasoning</option>
+        </select>
+      </div>
     </div>
   </div>
 );
@@ -442,6 +442,7 @@ const SystemSettings = {
   AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
   AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
   AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
+  AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",

   // Anthropic Keys
   AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
@@ -25,6 +25,8 @@ class AzureOpenAiLLM {
       }
     );
     this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
+    this.isOTypeModel =
+      process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning" || false;
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -34,20 +36,10 @@ class AzureOpenAiLLM {
     this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.#log(
-      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
+      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
     );
   }

-  /**
-   * Check if the model is an o# type model.
-   * NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
-   * It cannot be determined by the model name alone since model deployments can be named arbitrarily.
-   * @returns {boolean}
-   */
-  get isOTypeModel() {
-    return this.model.startsWith("o");
-  }
-
   #log(text, ...args) {
     console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
   }
@@ -65,7 +57,13 @@ class AzureOpenAiLLM {
   }

   streamingEnabled() {
-    if (this.isOTypeModel && this.model !== "o3-mini") return false;
+    // Streaming of reasoning models is not supported
+    if (this.isOTypeModel) {
+      this.#log(
+        "Streaming will be disabled. AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
+      );
+      return false;
+    }
     return "streamGetChatCompletion" in this;
   }

@@ -35,6 +35,15 @@ const KEY_MAPPING = {
     envKey: "EMBEDDING_MODEL_PREF",
     checks: [isNotEmpty],
   },
+  AzureOpenAiModelType: {
+    envKey: "AZURE_OPENAI_MODEL_TYPE",
+    checks: [
+      (input) =>
+        ["default", "reasoning"].includes(input)
+          ? null
+          : "Invalid model type. Must be one of: default, reasoning.",
+    ],
+  },

   // Anthropic Settings
   AnthropicApiKey: {
Loading…
Add table
Reference in a new issue