Add reasoning flag for azure models with "default" fallback

This commit is contained in:
Timothy Carambat 2025-02-05 15:22:45 -08:00 committed by GitHub
parent 805b4f79d8
commit e76baacec4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 35 additions and 12 deletions
frontend/src/components/LLMSelection/AzureAiOptions
server
models
utils
AiProviders/azureOpenAi
helpers

View file

@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
</option>
</select>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Type
</label>
<select
name="AzureOpenAiModelType"
defaultValue={settings?.AzureOpenAiModelType || "default"}
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
required={true}
>
<option value="default">Default</option>
<option value="reasoning">Reasoning</option>
</select>
</div>
</div>
</div>
);

View file

@ -442,6 +442,7 @@ const SystemSettings = {
AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",
// Anthropic Keys
AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,

View file

@ -25,6 +25,8 @@ class AzureOpenAiLLM {
}
);
this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
this.isOTypeModel =
process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning" || false;
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
@ -34,20 +36,10 @@ class AzureOpenAiLLM {
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.#log(
`Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
`Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
);
}
/**
* Check if the model is an o# type model.
* NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
* It cannot be determined by the model name alone since model deployments can be named arbitrarily.
* @returns {boolean}
*/
get isOTypeModel() {
return this.model.startsWith("o");
}
// Internal logger: prefixes every message with a green "[AzureOpenAi]" tag
// (ANSI color codes \x1b[32m ... \x1b[0m) and forwards any extra args to console.log.
#log(text, ...args) {
console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
}
@ -65,7 +57,13 @@ class AzureOpenAiLLM {
}
streamingEnabled() {
if (this.isOTypeModel && this.model !== "o3-mini") return false;
// Streaming of reasoning models is not supported
if (this.isOTypeModel) {
this.#log(
"Streaming will be disabled. AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
);
return false;
}
return "streamGetChatCompletion" in this;
}

View file

@ -35,6 +35,15 @@ const KEY_MAPPING = {
envKey: "EMBEDDING_MODEL_PREF",
checks: [isNotEmpty],
},
AzureOpenAiModelType: {
envKey: "AZURE_OPENAI_MODEL_TYPE",
checks: [
(input) =>
["default", "reasoning"].includes(input)
? null
: "Invalid model type. Must be one of: default, reasoning.",
],
},
// Anthropic Settings
AnthropicApiKey: {