Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2025-03-15 22:52:23 +00:00).
Passthrough agentModel for LMStudio (#2499)
This commit is contained in:
parent
ab6f03ce1c
commit
7342839e77
3 changed files with 13 additions and 5 deletions
server/utils
|
@ -5,7 +5,7 @@ const {
|
|||
|
||||
// hybrid of openAi LLM chat completion for LMStudio
|
||||
class LMStudioLLM {
|
||||
constructor(embedder = null, _modelPreference = null) {
|
||||
constructor(embedder = null, modelPreference = null) {
|
||||
if (!process.env.LMSTUDIO_BASE_PATH)
|
||||
throw new Error("No LMStudio API Base Path was set.");
|
||||
|
||||
|
@ -21,7 +21,10 @@ class LMStudioLLM {
|
|||
// and any other value will crash inferencing. So until this is patched we will
|
||||
// try to fetch the `/models` and have the user set it, or just fallback to "Loaded from Chat UI"
|
||||
// which will not impact users with <v0.2.17 and should work as well once the bug is fixed.
|
||||
this.model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
|
||||
this.model =
|
||||
modelPreference ||
|
||||
process.env.LMSTUDIO_MODEL_PREF ||
|
||||
"Loaded from Chat UI";
|
||||
this.limits = {
|
||||
history: this.promptWindowLimit() * 0.15,
|
||||
system: this.promptWindowLimit() * 0.15,
|
||||
|
|
|
@ -756,7 +756,7 @@ ${this.getHistory({ to: route.to })
|
|||
case "anthropic":
|
||||
return new Providers.AnthropicProvider({ model: config.model });
|
||||
case "lmstudio":
|
||||
return new Providers.LMStudioProvider({});
|
||||
return new Providers.LMStudioProvider({ model: config.model });
|
||||
case "ollama":
|
||||
return new Providers.OllamaProvider({ model: config.model });
|
||||
case "groq":
|
||||
|
|
|
@ -9,9 +9,14 @@ const UnTooled = require("./helpers/untooled.js");
|
|||
class LMStudioProvider extends InheritMultiple([Provider, UnTooled]) {
|
||||
model;
|
||||
|
||||
constructor(_config = {}) {
|
||||
/**
|
||||
*
|
||||
* @param {{model?: string}} config
|
||||
*/
|
||||
constructor(config = {}) {
|
||||
super();
|
||||
const model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
|
||||
const model =
|
||||
config?.model || process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
|
||||
const client = new OpenAI({
|
||||
baseURL: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
|
||||
apiKey: null,
|
||||
|
|
Loading…
Add table
Reference in a new issue