mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-04-17 18:18:11 +00:00
Add LMStudio agent support (generic) (#1246)
* add LMStudio agent support (generic) — "works" with non-tool-callable LLMs; highly dependent on system specs * add comments * enable few-shot prompting per function for OSS models
This commit is contained in:
parent
0b61ef6383
commit
1b4559f57f
19 changed files with 472 additions and 12 deletions
.vscode
frontend/src/pages/WorkspaceSettings/AgentConfig/AgentLLMSelection
server
1
.vscode/settings.json
vendored
1
.vscode/settings.json
vendored
|
@ -17,6 +17,7 @@
|
|||
"hljs",
|
||||
"inferencing",
|
||||
"Langchain",
|
||||
"lmstudio",
|
||||
"mbox",
|
||||
"Milvus",
|
||||
"Mintplex",
|
||||
|
|
|
@ -2,10 +2,11 @@ import React, { useEffect, useRef, useState } from "react";
|
|||
import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
|
||||
import AgentLLMItem from "./AgentLLMItem";
|
||||
import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
|
||||
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
|
||||
import { CaretUpDown, Gauge, MagnifyingGlass, X } from "@phosphor-icons/react";
|
||||
import AgentModelSelection from "../AgentModelSelection";
|
||||
|
||||
const ENABLED_PROVIDERS = ["openai", "anthropic"];
|
||||
const ENABLED_PROVIDERS = ["openai", "anthropic", "lmstudio"];
|
||||
const WARN_PERFORMANCE = ["lmstudio"];
|
||||
|
||||
const LLM_DEFAULT = {
|
||||
name: "Please make a selection",
|
||||
|
@ -62,6 +63,19 @@ export default function AgentLLMSelection({
|
|||
const selectedLLMObject = LLMS.find((llm) => llm.value === selectedLLM);
|
||||
return (
|
||||
<div className="border-b border-white/40 pb-8">
|
||||
{WARN_PERFORMANCE.includes(selectedLLM) && (
|
||||
<div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-4 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
|
||||
<div className="gap-x-2 flex items-center">
|
||||
<Gauge className="shrink-0" size={25} />
|
||||
<p className="text-sm">
|
||||
Performance of LLMs that do not explicitly support tool-calling is
|
||||
highly dependent on the model's capabilities and accuracy. Some
|
||||
abilities may be limited or non-functional.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex flex-col">
|
||||
<label htmlFor="name" className="block input-label">
|
||||
Workspace Agent LLM Provider
|
||||
|
|
|
@ -51,7 +51,7 @@ function agentWebsocket(app) {
|
|||
await agentHandler.createAIbitat({ socket });
|
||||
await agentHandler.startAgentCluster();
|
||||
} catch (e) {
|
||||
console.error(e.message);
|
||||
console.error(e.message, e);
|
||||
socket?.send(JSON.stringify({ type: "wssFailure", content: e.message }));
|
||||
socket?.close();
|
||||
}
|
||||
|
|
|
@ -51,6 +51,7 @@
|
|||
"joi": "^17.11.0",
|
||||
"joi-password-complexity": "^5.2.0",
|
||||
"js-tiktoken": "^1.0.7",
|
||||
"jsonrepair": "^3.7.0",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"langchain": "0.1.36",
|
||||
"mime": "^3.0.0",
|
||||
|
|
|
@ -603,6 +603,18 @@ ${this.getHistory({ to: route.to })
|
|||
|
||||
// Execute the function and return the result to the provider
|
||||
fn.caller = byAgent || "agent";
|
||||
|
||||
// For OSS LLMs we really need to keep tabs on what they are calling
|
||||
// so we can log it here.
|
||||
if (provider?.verbose) {
|
||||
this?.introspect?.(
|
||||
`[debug]: ${fn.caller} is attempting to call \`${name}\` tool`
|
||||
);
|
||||
this.handlerProps.log(
|
||||
`[debug]: ${fn.caller} is attempting to call \`${name}\` tool`
|
||||
);
|
||||
}
|
||||
|
||||
const result = await fn.handler(args);
|
||||
Telemetry.sendTelemetry("agent_tool_call", { tool: name }, null, true);
|
||||
return await this.handleExecution(
|
||||
|
@ -727,6 +739,8 @@ ${this.getHistory({ to: route.to })
|
|||
return new Providers.OpenAIProvider({ model: config.model });
|
||||
case "anthropic":
|
||||
return new Providers.AnthropicProvider({ model: config.model });
|
||||
case "lmstudio":
|
||||
return new Providers.LMStudioProvider({});
|
||||
|
||||
default:
|
||||
throw new Error(
|
||||
|
|
|
@ -16,7 +16,37 @@ const memory = {
|
|||
tracker: new Deduplicator(),
|
||||
name: this.name,
|
||||
description:
|
||||
"Search against local documents for context that is relevant to the query or store a snippet of text into memory for retrieval later. Storing information should only be done when the user specifically requests for information to be remembered or saved to long-term memory. You should use this tool before search the internet for information.",
|
||||
"Search against local documents for context that is relevant to the query or store a snippet of text into memory for retrieval later. Storing information should only be done when the user specifically requests for information to be remembered or saved to long-term memory. You should use this tool before search the internet for information. Do not use this tool unless you are explicity told to 'remember' or 'store' information.",
|
||||
examples: [
|
||||
{
|
||||
prompt: "What is AnythingLLM?",
|
||||
call: JSON.stringify({
|
||||
action: "search",
|
||||
content: "What is AnythingLLM?",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "What do you know about Plato's motives?",
|
||||
call: JSON.stringify({
|
||||
action: "search",
|
||||
content: "What are the facts about Plato's motives?",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "Remember that you are a robot",
|
||||
call: JSON.stringify({
|
||||
action: "store",
|
||||
content: "I am a robot, the user told me that i am.",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "Save that to memory please.",
|
||||
call: JSON.stringify({
|
||||
action: "store",
|
||||
content: "<insert summary of conversation until now>",
|
||||
}),
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
$schema: "http://json-schema.org/draft-07/schema#",
|
||||
type: "object",
|
||||
|
|
|
@ -16,6 +16,32 @@ const saveFileInBrowser = {
|
|||
name: this.name,
|
||||
description:
|
||||
"Save content to a file when the user explicity asks for a download of the file.",
|
||||
examples: [
|
||||
{
|
||||
prompt: "Save me that to a file named 'output'",
|
||||
call: JSON.stringify({
|
||||
file_content:
|
||||
"<content of the file we will write previous conversation>",
|
||||
filename: "output.txt",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "Save me that to my desktop",
|
||||
call: JSON.stringify({
|
||||
file_content:
|
||||
"<content of the file we will write previous conversation>",
|
||||
filename: "<relevant filename>.txt",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "Save me that to a file",
|
||||
call: JSON.stringify({
|
||||
file_content:
|
||||
"<content of the file we will write from previous conversation>",
|
||||
filename: "<descriptive filename>.txt",
|
||||
}),
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
$schema: "http://json-schema.org/draft-07/schema#",
|
||||
type: "object",
|
||||
|
|
|
@ -19,6 +19,26 @@ const docSummarizer = {
|
|||
controller: new AbortController(),
|
||||
description:
|
||||
"Can get the list of files available to search with descriptions and can select a single file to open and summarize.",
|
||||
examples: [
|
||||
{
|
||||
prompt: "Summarize example.txt",
|
||||
call: JSON.stringify({
|
||||
action: "summarize",
|
||||
document_filename: "example.txt",
|
||||
}),
|
||||
},
|
||||
{
|
||||
prompt: "What files can you see?",
|
||||
call: JSON.stringify({ action: "list", document_filename: null }),
|
||||
},
|
||||
{
|
||||
prompt: "Tell me about readme.md",
|
||||
call: JSON.stringify({
|
||||
action: "summarize",
|
||||
document_filename: "readme.md",
|
||||
}),
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
$schema: "http://json-schema.org/draft-07/schema#",
|
||||
type: "object",
|
||||
|
|
|
@ -13,7 +13,21 @@ const webBrowsing = {
|
|||
super: aibitat,
|
||||
name: this.name,
|
||||
description:
|
||||
"Searches for a given query online using a search engine.",
|
||||
"Searches for a given query using a search engine to get better results for the user query.",
|
||||
examples: [
|
||||
{
|
||||
prompt: "Who won the world series today?",
|
||||
call: JSON.stringify({ query: "Winner of today's world series" }),
|
||||
},
|
||||
{
|
||||
prompt: "What is AnythingLLM?",
|
||||
call: JSON.stringify({ query: "AnythingLLM" }),
|
||||
},
|
||||
{
|
||||
prompt: "Current AAPL stock price",
|
||||
call: JSON.stringify({ query: "AAPL stock price today" }),
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
$schema: "http://json-schema.org/draft-07/schema#",
|
||||
type: "object",
|
||||
|
|
|
@ -16,7 +16,17 @@ const webScraping = {
|
|||
name: this.name,
|
||||
controller: new AbortController(),
|
||||
description:
|
||||
"Scrapes the content of a webpage or online resource from a URL.",
|
||||
"Scrapes the content of a webpage or online resource from a provided URL.",
|
||||
examples: [
|
||||
{
|
||||
prompt: "What is useanything.com about?",
|
||||
call: JSON.stringify({ uri: "https://useanything.com" }),
|
||||
},
|
||||
{
|
||||
prompt: "Scrape https://example.com",
|
||||
call: JSON.stringify({ uri: "https://example.com" }),
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
$schema: "http://json-schema.org/draft-07/schema#",
|
||||
type: "object",
|
||||
|
@ -24,7 +34,8 @@ const webScraping = {
|
|||
url: {
|
||||
type: "string",
|
||||
format: "uri",
|
||||
description: "A web URL.",
|
||||
description:
|
||||
"A complete web address URL including protocol. Assumes https if not provided.",
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
|
|
|
@ -4,16 +4,25 @@
|
|||
|
||||
const { ChatOpenAI } = require("@langchain/openai");
|
||||
const { ChatAnthropic } = require("@langchain/anthropic");
|
||||
const DEFAULT_WORKSPACE_PROMPT =
|
||||
"You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.";
|
||||
|
||||
class Provider {
|
||||
_client;
|
||||
constructor(client) {
|
||||
if (this.constructor == Provider) {
|
||||
throw new Error("Class is of abstract type and can't be instantiated");
|
||||
return;
|
||||
}
|
||||
this._client = client;
|
||||
}
|
||||
|
||||
providerLog(text, ...args) {
|
||||
console.log(
|
||||
`\x1b[36m[AgentLLM${this?.model ? ` - ${this.model}` : ""}]\x1b[0m ${text}`,
|
||||
...args
|
||||
);
|
||||
}
|
||||
|
||||
get client() {
|
||||
return this._client;
|
||||
}
|
||||
|
@ -48,6 +57,15 @@ class Provider {
|
|||
return 8_000;
|
||||
}
|
||||
}
|
||||
|
||||
static systemPrompt(provider = null) {
|
||||
switch (provider) {
|
||||
case "lmstudio":
|
||||
return "You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions. Tools will be handled by another assistant and you will simply receive their responses to help answer the user prompt - always try to answer the user's prompt the best you can with the context available to you and your general knowledge.";
|
||||
default:
|
||||
return DEFAULT_WORKSPACE_PROMPT;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Provider;
|
||||
|
|
16
server/utils/agents/aibitat/providers/helpers/classes.js
Normal file
16
server/utils/agents/aibitat/providers/helpers/classes.js
Normal file
|
@ -0,0 +1,16 @@
|
|||
function InheritMultiple(bases = []) {
|
||||
class Bases {
|
||||
constructor() {
|
||||
bases.forEach((base) => Object.assign(this, new base()));
|
||||
}
|
||||
}
|
||||
|
||||
bases.forEach((base) => {
|
||||
Object.getOwnPropertyNames(base.prototype)
|
||||
.filter((prop) => prop != "constructor")
|
||||
.forEach((prop) => (Bases.prototype[prop] = base.prototype[prop]));
|
||||
});
|
||||
return Bases;
|
||||
}
|
||||
|
||||
module.exports = InheritMultiple;
|
168
server/utils/agents/aibitat/providers/helpers/untooled.js
Normal file
168
server/utils/agents/aibitat/providers/helpers/untooled.js
Normal file
|
@ -0,0 +1,168 @@
|
|||
const { safeJsonParse } = require("../../../../http");
|
||||
const { Deduplicator } = require("../../utils/dedupe");
|
||||
|
||||
// Useful inheritance class for a model which supports OpenAi schema for API requests
|
||||
// but does not have tool-calling or JSON output support.
|
||||
class UnTooled {
|
||||
constructor() {
|
||||
this.deduplicator = new Deduplicator();
|
||||
}
|
||||
|
||||
cleanMsgs(messages) {
|
||||
const modifiedMessages = [];
|
||||
messages.forEach((msg) => {
|
||||
if (msg.role === "function") {
|
||||
const prevMsg = modifiedMessages[modifiedMessages.length - 1].content;
|
||||
modifiedMessages[modifiedMessages.length - 1].content =
|
||||
`${prevMsg}\n${msg.content}`;
|
||||
return;
|
||||
}
|
||||
modifiedMessages.push(msg);
|
||||
});
|
||||
return modifiedMessages;
|
||||
}
|
||||
|
||||
showcaseFunctions(functions = []) {
|
||||
let output = "";
|
||||
functions.forEach((def) => {
|
||||
let shotExample = `-----------
|
||||
Function name: ${def.name}
|
||||
Function Description: ${def.description}
|
||||
Function parameters in JSON format:
|
||||
${JSON.stringify(def.parameters.properties, null, 4)}\n`;
|
||||
|
||||
if (Array.isArray(def.examples)) {
|
||||
def.examples.forEach(({ prompt, call }) => {
|
||||
shotExample += `Query: "${prompt}"\nJSON: ${call}\n`;
|
||||
});
|
||||
}
|
||||
output += `${shotExample}-----------\n`;
|
||||
});
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if two arrays of strings or numbers have the same values
|
||||
* @param {string[]|number[]} arr1
|
||||
* @param {string[]|number[]} arr2
|
||||
* @param {Object} [opts]
|
||||
* @param {boolean} [opts.enforceOrder] - By default (false), the order of the values in the arrays doesn't matter.
|
||||
* @return {boolean}
|
||||
*/
|
||||
compareArrays(arr1, arr2, opts) {
|
||||
function vKey(i, v) {
|
||||
return (opts?.enforceOrder ? `${i}-` : "") + `${typeof v}-${v}`;
|
||||
}
|
||||
|
||||
if (arr1.length !== arr2.length) return false;
|
||||
|
||||
const d1 = {};
|
||||
const d2 = {};
|
||||
for (let i = arr1.length - 1; i >= 0; i--) {
|
||||
d1[vKey(i, arr1[i])] = true;
|
||||
d2[vKey(i, arr2[i])] = true;
|
||||
}
|
||||
|
||||
for (let i = arr1.length - 1; i >= 0; i--) {
|
||||
const v = vKey(i, arr1[i]);
|
||||
if (d1[v] !== d2[v]) return false;
|
||||
}
|
||||
|
||||
for (let i = arr2.length - 1; i >= 0; i--) {
|
||||
const v = vKey(i, arr2[i]);
|
||||
if (d1[v] !== d2[v]) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
validFuncCall(functionCall = {}, functions = []) {
|
||||
if (
|
||||
!functionCall ||
|
||||
!functionCall?.hasOwnProperty("name") ||
|
||||
!functionCall?.hasOwnProperty("arguments")
|
||||
) {
|
||||
return {
|
||||
valid: false,
|
||||
reason: "Missing name or arguments in function call.",
|
||||
};
|
||||
}
|
||||
|
||||
const foundFunc = functions.find((def) => def.name === functionCall.name);
|
||||
if (!foundFunc) {
|
||||
return { valid: false, reason: "Function name does not exist." };
|
||||
}
|
||||
|
||||
const props = Object.keys(foundFunc.parameters.properties);
|
||||
const fProps = Object.keys(functionCall.arguments);
|
||||
if (!this.compareArrays(props, fProps)) {
|
||||
return { valid: false, reason: "Invalid argument schema match." };
|
||||
}
|
||||
|
||||
return { valid: true, reason: null };
|
||||
}
|
||||
|
||||
async functionCall(messages, functions) {
|
||||
const history = [...messages].filter((msg) =>
|
||||
["user", "assistant"].includes(msg.role)
|
||||
);
|
||||
if (history[history.length - 1].role !== "user") return null;
|
||||
|
||||
const response = await this.client.chat.completions
|
||||
.create({
|
||||
model: this.model,
|
||||
temperature: 0,
|
||||
messages: [
|
||||
{
|
||||
content: `You are a program which picks the most optimal function and parameters to call.
|
||||
DO NOT HAVE TO PICK A FUNCTION IF IT WILL NOT HELP ANSWER OR FULFILL THE USER'S QUERY.
|
||||
When a function is selection, respond in JSON with no additional text.
|
||||
When there is no relevant function to call - return with a regular chat text response.
|
||||
Your task is to pick a **single** function that we will use to call, if any seem useful or relevant for the user query.
|
||||
|
||||
All JSON responses should have two keys.
|
||||
'name': this is the name of the function name to call. eg: 'web-scraper', 'rag-memory', etc..
|
||||
'arguments': this is an object with the function properties to invoke the function.
|
||||
DO NOT INCLUDE ANY OTHER KEYS IN JSON RESPONSES.
|
||||
|
||||
Here are the available tools you can use an examples of a query and response so you can understand how each one works.
|
||||
${this.showcaseFunctions(functions)}
|
||||
|
||||
Now pick a function if there is an appropriate one to use given the last user message and the given conversation so far.`,
|
||||
role: "system",
|
||||
},
|
||||
...history,
|
||||
],
|
||||
})
|
||||
.then((result) => {
|
||||
if (!result.hasOwnProperty("choices"))
|
||||
throw new Error("LMStudio chat: No results!");
|
||||
if (result.choices.length === 0)
|
||||
throw new Error("LMStudio chat: No results length!");
|
||||
return result.choices[0].message.content;
|
||||
})
|
||||
.catch((_) => {
|
||||
return null;
|
||||
});
|
||||
|
||||
const call = safeJsonParse(response, null);
|
||||
if (call === null) return { toolCall: null, text: response }; // failed to parse, so must be text.
|
||||
|
||||
const { valid, reason } = this.validFuncCall(call, functions);
|
||||
if (!valid) {
|
||||
this.providerLog(`Invalid function tool call: ${reason}.`);
|
||||
return { toolCall: null, text: null };
|
||||
}
|
||||
|
||||
if (this.deduplicator.isDuplicate(call.name, call.arguments)) {
|
||||
this.providerLog(
|
||||
`Function tool with exact arguments has already been called this stack.`
|
||||
);
|
||||
return { toolCall: null, text: null };
|
||||
}
|
||||
|
||||
return { toolCall: call, text: null };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = UnTooled;
|
|
@ -1,7 +1,9 @@
|
|||
const OpenAIProvider = require("./openai.js");
|
||||
const AnthropicProvider = require("./anthropic.js");
|
||||
const LMStudioProvider = require("./lmstudio.js");
|
||||
|
||||
module.exports = {
|
||||
OpenAIProvider,
|
||||
AnthropicProvider,
|
||||
LMStudioProvider,
|
||||
};
|
||||
|
|
90
server/utils/agents/aibitat/providers/lmstudio.js
Normal file
90
server/utils/agents/aibitat/providers/lmstudio.js
Normal file
|
@ -0,0 +1,90 @@
|
|||
const OpenAI = require("openai");
|
||||
const Provider = require("./ai-provider.js");
|
||||
const InheritMultiple = require("./helpers/classes.js");
|
||||
const UnTooled = require("./helpers/untooled.js");
|
||||
|
||||
/**
|
||||
* The provider for the LMStudio provider.
|
||||
*/
|
||||
class LMStudioProvider extends InheritMultiple([Provider, UnTooled]) {
|
||||
model;
|
||||
|
||||
constructor(_config = {}) {
|
||||
super();
|
||||
const model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
|
||||
const client = new OpenAI({
|
||||
baseURL: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
|
||||
apiKey: null,
|
||||
maxRetries: 3,
|
||||
model,
|
||||
});
|
||||
this._client = client;
|
||||
this.model = model;
|
||||
this.verbose = true;
|
||||
}
|
||||
|
||||
get client() {
|
||||
return this._client;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a completion based on the received messages.
|
||||
*
|
||||
* @param messages A list of messages to send to the API.
|
||||
* @param functions
|
||||
* @returns The completion.
|
||||
*/
|
||||
async complete(messages, functions = null) {
|
||||
try {
|
||||
let completion;
|
||||
if (functions.length > 0) {
|
||||
const { toolCall, text } = await this.functionCall(messages, functions);
|
||||
|
||||
if (toolCall !== null) {
|
||||
this.providerLog(`Valid tool call found - running ${toolCall.name}.`);
|
||||
this.deduplicator.trackRun(toolCall.name, toolCall.arguments);
|
||||
return {
|
||||
result: null,
|
||||
functionCall: {
|
||||
name: toolCall.name,
|
||||
arguments: toolCall.arguments,
|
||||
},
|
||||
cost: 0,
|
||||
};
|
||||
}
|
||||
completion = { content: text };
|
||||
}
|
||||
|
||||
if (!completion?.content) {
|
||||
this.providerLog(
|
||||
"Will assume chat completion without tool call inputs."
|
||||
);
|
||||
const response = await this.client.chat.completions.create({
|
||||
model: this.model,
|
||||
messages: this.cleanMsgs(messages),
|
||||
});
|
||||
completion = response.choices[0].message;
|
||||
}
|
||||
|
||||
return {
|
||||
result: completion.content,
|
||||
cost: 0,
|
||||
};
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the cost of the completion.
|
||||
*
|
||||
* @param _usage The completion to get the cost for.
|
||||
* @returns The cost of the completion.
|
||||
* Stubbed since LMStudio has no cost basis.
|
||||
*/
|
||||
getCost(_usage) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = LMStudioProvider;
|
|
@ -1,6 +1,7 @@
|
|||
const AgentPlugins = require("./aibitat/plugins");
|
||||
const { SystemSettings } = require("../../models/systemSettings");
|
||||
const { safeJsonParse } = require("../http");
|
||||
const Provider = require("./aibitat/providers/ai-provider");
|
||||
|
||||
const USER_AGENT = {
|
||||
name: "USER",
|
||||
|
@ -14,7 +15,7 @@ const USER_AGENT = {
|
|||
|
||||
const WORKSPACE_AGENT = {
|
||||
name: "@agent",
|
||||
getDefinition: async () => {
|
||||
getDefinition: async (provider = null) => {
|
||||
const defaultFunctions = [
|
||||
AgentPlugins.memory.name, // RAG
|
||||
AgentPlugins.docSummarizer.name, // Doc Summary
|
||||
|
@ -30,7 +31,7 @@ const WORKSPACE_AGENT = {
|
|||
});
|
||||
|
||||
return {
|
||||
role: "You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.",
|
||||
role: Provider.systemPrompt(provider),
|
||||
functions: defaultFunctions,
|
||||
};
|
||||
},
|
||||
|
|
|
@ -77,14 +77,32 @@ class AgentHandler {
|
|||
if (!process.env.ANTHROPIC_API_KEY)
|
||||
throw new Error("Anthropic API key must be provided to use agents.");
|
||||
break;
|
||||
case "lmstudio":
|
||||
if (!process.env.LMSTUDIO_BASE_PATH)
|
||||
throw new Error("LMStudio bash path must be provided to use agents.");
|
||||
break;
|
||||
default:
|
||||
throw new Error("No provider found to power agent cluster.");
|
||||
}
|
||||
}
|
||||
|
||||
#providerDefault() {
|
||||
switch (this.provider) {
|
||||
case "openai":
|
||||
return "gpt-3.5-turbo";
|
||||
case "anthropic":
|
||||
return "claude-3-sonnet-20240229";
|
||||
case "lmstudio":
|
||||
return "server-default";
|
||||
default:
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
#providerSetupAndCheck() {
|
||||
this.provider = this.invocation.workspace.agentProvider || "openai";
|
||||
this.model = this.invocation.workspace.agentModel || "gpt-3.5-turbo";
|
||||
this.model =
|
||||
this.invocation.workspace.agentModel || this.#providerDefault();
|
||||
this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
|
||||
this.#checkSetup();
|
||||
}
|
||||
|
@ -137,7 +155,7 @@ class AgentHandler {
|
|||
this.aibitat.agent(USER_AGENT.name, await USER_AGENT.getDefinition());
|
||||
this.aibitat.agent(
|
||||
WORKSPACE_AGENT.name,
|
||||
await WORKSPACE_AGENT.getDefinition()
|
||||
await WORKSPACE_AGENT.getDefinition(this.provider)
|
||||
);
|
||||
|
||||
this.#funcsToLoad = [
|
||||
|
|
|
@ -3,6 +3,7 @@ process.env.NODE_ENV === "development"
|
|||
: require("dotenv").config();
|
||||
const JWT = require("jsonwebtoken");
|
||||
const { User } = require("../../models/user");
|
||||
const { jsonrepair } = require("jsonrepair");
|
||||
|
||||
function reqBody(request) {
|
||||
return typeof request.body === "string"
|
||||
|
@ -65,6 +66,16 @@ function safeJsonParse(jsonString, fallback = null) {
|
|||
try {
|
||||
return JSON.parse(jsonString);
|
||||
} catch {}
|
||||
|
||||
// If the jsonString does not look like an Obj or Array, dont attempt
|
||||
// to repair it.
|
||||
if (jsonString?.startsWith("[") || jsonString?.startsWith("{")) {
|
||||
try {
|
||||
const repairedJson = jsonrepair(jsonString);
|
||||
return JSON.parse(repairedJson);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
return fallback;
|
||||
}
|
||||
|
||||
|
|
|
@ -3743,6 +3743,11 @@ jsonpointer@^5.0.1:
|
|||
resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559"
|
||||
integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==
|
||||
|
||||
jsonrepair@^3.7.0:
|
||||
version "3.7.0"
|
||||
resolved "https://registry.yarnpkg.com/jsonrepair/-/jsonrepair-3.7.0.tgz#b4fddb9c8d29dd62263f4f037334099e28feac21"
|
||||
integrity sha512-TwE50n4P4gdVfMQF2q+X+IGy4ntFfcuHHE8zjRyBcdtrRK0ORZsjOZD6zmdylk4p277nQBAlHgsEPWtMIQk4LQ==
|
||||
|
||||
jsonwebtoken@^8.5.1:
|
||||
version "8.5.1"
|
||||
resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d"
|
||||
|
|
Loading…
Add table
Reference in a new issue