Add multimodality support ()

* Add multimodality support

* Add Bedrock, KoboldCpp, LocalAI, and TextGenWebUI multi-modal support

* temp dev build

* patch bad import

* noscrolls for windows dnd

* update README

* add multimodal check
Timothy Carambat 2024-07-31 10:47:49 -07:00 committed by GitHub
parent 20cd6b7481
commit 38fc181238
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 713 additions and 91 deletions
.github/workflows
README.md
frontend/src
  components/WorkspaceChat
    ChatContainer
      ChatHistory
        HistoricalMessage
          Actions/EditMessage
            index.jsx
          index.jsx
      DnDWrapper
      PromptInput
        AttachItem
        Attachments
        index.jsx
      index.jsx
    index.jsx
  models
server
  endpoints
  utils
    AiProviders
      anthropic
      bedrock
      gemini
      koboldCPP
      liteLLM
      lmStudio
      localAi
      ollama
      openAi
      openRouter
      textGenWebUI
    chats
    helpers

.github/workflows/dev-build.yaml

@ -6,7 +6,7 @@ concurrency:
on:
push:
branches: ['vex'] # put your current branch to create a build. Core team only.
branches: ['558-multi-modal-support'] # put your current branch to create a build. Core team only.
paths-ignore:
- '**.md'
- 'cloud-deployments/*'

README.md

@ -53,19 +53,19 @@ AnythingLLM is a full-stack application where you can use commercial off-the-she
AnythingLLM divides your documents into objects called `workspaces`. A Workspace functions a lot like a thread, but with the addition of containerization of your documents. Workspaces can share documents, but they do not talk to each other so you can keep your context for each workspace clean.
Some cool features of AnythingLLM
## Cool features of AnythingLLM
- **Multi-user instance support and permissioning**
- Agents inside your workspace (browse the web, run code, etc)
- [Custom Embeddable Chat widget for your website](./embed/README.md)
- Multiple document type support (PDF, TXT, DOCX, etc)
- Manage documents in your vector database from a simple UI
- Two chat modes `conversation` and `query`. Conversation retains previous questions and amendments. Query is simple QA against your documents
- In-chat citations
- 🆕 **Multi-modal support (both closed and open-source LLMs!)**
- 👤 Multi-user instance support and permissioning _Docker version only_
- 🦾 Agents inside your workspace (browse the web, run code, etc)
- 💬 [Custom Embeddable Chat widget for your website](./embed/README.md) _Docker version only_
- 📖 Multiple document type support (PDF, TXT, DOCX, etc)
- Simple chat UI with Drag-n-Drop functionality and clear citations.
- 100% Cloud deployment ready.
- "Bring your own LLM" model.
- Extremely efficient cost-saving measures for managing very large documents. You'll never pay to embed a massive document or transcript more than once. 90% more cost effective than other document chatbot solutions.
- Works with all popular [closed and open-source LLM providers](#supported-llms-embedder-models-speech-models-and-vector-databases).
- Built-in cost & time-saving measures for managing very large documents compared to any other chat UI.
- Full Developer API for custom integrations!
- Much more...install and find out!
### Supported LLMs, Embedder Models, Speech models, and Vector Databases

frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx

@ -69,6 +69,7 @@ export function EditMessageForm({
role,
chatId,
message,
attachments = [],
adjustTextArea,
saveChanges,
}) {
@ -77,15 +78,15 @@ export function EditMessageForm({
e.preventDefault();
const form = new FormData(e.target);
const editedMessage = form.get("editedMessage");
saveChanges({ editedMessage, chatId, role });
saveChanges({ editedMessage, chatId, role, attachments });
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
);
}
function cancelEdits() {
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
);
return false;
}
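For reference, a minimal sketch of a consumer of the EDIT_EVENT dispatched above, now receiving the attachments payload (the listener below is illustrative, not part of this commit):

// Illustrative only: any component listening for EDIT_EVENT can read the
// attachments carried on the event detail alongside chatId and role.
window.addEventListener(EDIT_EVENT, (event) => {
  const { chatId, role, attachments } = event.detail;
  console.log(`toggle edit for ${role} message ${chatId}`, attachments);
});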

frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx

@ -19,6 +19,7 @@ const HistoricalMessage = ({
role,
workspace,
sources = [],
attachments = [],
error = false,
feedbackScore = null,
chatId = null,
@ -92,16 +93,20 @@ const HistoricalMessage = ({
role={role}
chatId={chatId}
message={message}
attachments={attachments}
adjustTextArea={adjustTextArea}
saveChanges={saveEditedMessage}
/>
) : (
<span
className={`flex flex-col gap-y-1`}
dangerouslySetInnerHTML={{
__html: DOMPurify.sanitize(renderMarkdown(message)),
}}
/>
<div>
<span
className={`flex flex-col gap-y-1`}
dangerouslySetInnerHTML={{
__html: DOMPurify.sanitize(renderMarkdown(message)),
}}
/>
<ChatAttachments attachments={attachments} />
</div>
)}
</div>
<div className="flex gap-x-5 ml-14">
@ -160,3 +165,18 @@ export default memo(
);
}
);
function ChatAttachments({ attachments = [] }) {
if (!attachments.length) return null;
return (
<div className="flex flex-wrap gap-2">
{attachments.map((item) => (
<img
key={item.name}
src={item.contentString}
className="max-w-[300px] rounded-md"
/>
))}
</div>
);
}
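The attachments prop is the same array shape the frontend's parseAttachments produces; an illustrative value ChatAttachments can render (base64 truncated):

const sampleAttachments = [
  {
    name: "diagram.png",
    mime: "image/png",
    contentString: "data:image/png;base64,iVBORw0KGgo...", // full data URL in practice
  },
];
// <ChatAttachments attachments={sampleAttachments} /> renders one <img> per item,
// using contentString (a data URL) directly as the image src.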

frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx

@ -93,7 +93,12 @@ export default function ChatHistory({
sendCommand(`${heading} ${message}`, true);
};
const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
const saveEditedMessage = async ({
editedMessage,
chatId,
role,
attachments = [],
}) => {
if (!editedMessage) return; // Don't save empty edits.
// if the edit was a user message, we will auto-regenerate the response and delete all
@ -110,7 +115,7 @@ export default function ChatHistory({
updatedHistory[updatedHistory.length - 1].content = editedMessage;
// remove all edited messages after the edited message in backend
await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
sendCommand(editedMessage, true, updatedHistory);
sendCommand(editedMessage, true, updatedHistory, attachments);
return;
}
@ -228,6 +233,7 @@ export default function ChatHistory({
feedbackScore={props.feedbackScore}
chatId={props.chatId}
error={props.error}
attachments={props.attachments}
regenerateMessage={regenerateAssistantMessage}
isLastMessage={isLastBotReply}
saveEditedMessage={saveEditedMessage}

frontend/src/components/WorkspaceChat/ChatContainer/DnDWrapper/index.jsx

@ -1,4 +1,4 @@
import { useState, useEffect } from "react";
import { useState, useEffect, createContext, useContext } from "react";
import { v4 } from "uuid";
import System from "@/models/system";
import { useDropzone } from "react-dropzone";
@ -6,6 +6,7 @@ import DndIcon from "./dnd-icon.png";
import Workspace from "@/models/workspace";
import useUser from "@/hooks/useUser";
export const DndUploaderContext = createContext();
export const REMOVE_ATTACHMENT_EVENT = "ATTACHMENT_REMOVE";
export const CLEAR_ATTACHMENTS_EVENT = "ATTACHMENT_CLEAR";
@ -14,13 +15,14 @@ export const CLEAR_ATTACHMENTS_EVENT = "ATTACHMENT_CLEAR";
* @typedef Attachment
* @property {string} uid - unique file id.
* @property {File} file - native File object
* @property {string|null} contentString - base64 encoded string of file
* @property {('in_progress'|'failed'|'success')} status - the automatic upload status.
* @property {string|null} error - Error message
* @property {{id:string, location:string}|null} document - uploaded document details
* @property {('attachment'|'upload')} type - The type of upload. Attachments are chat-specific, uploads go to the workspace.
*/
export default function DnDFileUploaderWrapper({ workspace, children }) {
/** @type {[Attachment[], Function]} */
export function DnDFileUploaderProvider({ workspace, children }) {
const [files, setFiles] = useState([]);
const [ready, setReady] = useState(false);
const [dragging, setDragging] = useState(false);
@ -49,7 +51,7 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
/** @type {{uid: Attachment['uid'], document: Attachment['document']}} */
const { uid, document } = event.detail;
setFiles((prev) => prev.filter((prevFile) => prevFile.uid !== uid));
if (!document.location) return;
if (!document?.location) return;
await Workspace.deleteAndUnembedFile(workspace.slug, document.location);
}
@ -60,20 +62,68 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
setFiles([]);
}
/**
* Turns files into attachments we can send as body request to backend
* for a chat.
* @returns {{name:string,mime:string,contentString:string}[]}
*/
function parseAttachments() {
return (
files
?.filter((file) => file.type === "attachment")
?.map(
(
/** @type {Attachment} */
attachment
) => {
return {
name: attachment.file.name,
mime: attachment.file.type,
contentString: attachment.contentString,
};
}
) || []
);
}
/**
* Handle dropped files.
* @param {Attachment[]} acceptedFiles
* @param {any[]} _rejections
*/
async function onDrop(acceptedFiles, _rejections) {
setDragging(false);
/** @type {Attachment[]} */
const newAccepted = acceptedFiles.map((file) => {
return {
uid: v4(),
file,
status: "in_progress",
error: null,
};
});
const newAccepted = [];
for (const file of acceptedFiles) {
if (file.type.startsWith("image/")) {
newAccepted.push({
uid: v4(),
file,
contentString: await toBase64(file),
status: "success",
error: null,
type: "attachment",
});
} else {
newAccepted.push({
uid: v4(),
file,
contentString: null,
status: "in_progress",
error: null,
type: "upload",
});
}
}
setFiles((prev) => [...prev, ...newAccepted]);
for (const attachment of newAccepted) {
// Images/attachments are chat specific.
if (attachment.type === "attachment") continue;
const formData = new FormData();
formData.append("file", attachment.file, attachment.file.name);
Workspace.uploadAndEmbedFile(workspace.slug, formData).then(
@ -100,6 +150,18 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
}
}
return (
<DndUploaderContext.Provider
value={{ files, ready, dragging, setDragging, onDrop, parseAttachments }}
>
{children}
</DndUploaderContext.Provider>
);
}
export default function DnDFileUploaderWrapper({ children }) {
const { onDrop, ready, dragging, setDragging } =
useContext(DndUploaderContext);
const { getRootProps, getInputProps } = useDropzone({
onDrop,
disabled: !ready,
@ -129,8 +191,25 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
</div>
</div>
</div>
<input {...getInputProps()} />
{children(files, setFiles)}
<input id="dnd-chat-file-uploader" {...getInputProps()} />
{children}
</div>
);
}
/**
* Convert image types into Base64 strings for requests.
* @param {File} file
 * @returns {Promise<string>}
*/
async function toBase64(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = () => {
const base64String = reader.result.split(",")[1];
resolve(`data:${file.type};base64,${base64String}`);
};
reader.onerror = (error) => reject(error);
reader.readAsDataURL(file);
});
}
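Since attachment state now lives in React context, any child of DnDFileUploaderProvider can read it without prop drilling; a minimal sketch (the AttachmentCounter component is hypothetical):

import { useContext } from "react";
import { DndUploaderContext } from "./DnDWrapper";

// Hypothetical consumer: reads the staged files straight from context.
function AttachmentCounter() {
  const { files } = useContext(DndUploaderContext);
  return <span>{files.length} file(s) staged</span>;
}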

frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/AttachItem/index.jsx

@ -0,0 +1,34 @@
import { PaperclipHorizontal } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
/**
* This is a simple proxy component that clicks on the DnD file uploader for the user.
* @returns
*/
export default function AttachItem() {
return (
<>
<button
id="attach-item-btn"
data-tooltip-id="attach-item-btn"
data-tooltip-content="Attach a file to this chat"
aria-label="Attach a file to this chat"
type="button"
onClick={(e) => {
e?.target?.blur();
document?.getElementById("dnd-chat-file-uploader")?.click();
return;
}}
className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 cursor-pointer`}
>
<PaperclipHorizontal className="w-6 h-6 pointer-events-none text-white rotate-90 -scale-y-100" />
<Tooltip
id="attach-item-btn"
place="top"
delayShow={300}
className="tooltip !text-xs z-99"
/>
</button>
</>
);
}

frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/Attachments/index.jsx

@ -5,6 +5,7 @@ import {
FileDoc,
FileHtml,
FileText,
FileImage,
FilePdf,
WarningOctagon,
X,
@ -32,7 +33,7 @@ export default function AttachmentManager({ attachments }) {
* @param {{attachment: import("../../DnDWrapper").Attachment}}
*/
function AttachmentItem({ attachment }) {
const { uid, file, status, error, document } = attachment;
const { uid, file, status, error, document, type } = attachment;
const { iconBgColor, Icon } = displayFromFile(file);
function removeFileFromQueue() {
@ -106,6 +107,48 @@ function AttachmentItem({ attachment }) {
);
}
if (type === "attachment") {
return (
<>
<div
data-tooltip-id={`attachment-uid-${uid}-success`}
data-tooltip-content={`${file.name} will be attached to this prompt. It will not be embedded into the workspace permanently.`}
className={`relative h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-zinc-800 border border-white/20 w-[200px] group`}
>
<div className="invisible group-hover:visible absolute -top-[5px] -right-[5px] w-fit h-fit z-[10]">
<button
onClick={removeFileFromQueue}
type="button"
className="bg-zinc-700 hover:bg-red-400 rounded-full p-1 flex items-center justify-center hover:border-transparent border border-white/40"
>
<X
size={10}
className="flex-shrink-0 text-zinc-200 group-hover:text-white"
/>
</button>
</div>
<div
className={`${iconBgColor} rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
>
<Icon size={30} className="text-white" />
</div>
<div className="flex flex-col w-[130px]">
<p className="text-white text-xs font-medium truncate">
{file.name}
</p>
<p className="text-white/80 text-xs font-medium">Image attached!</p>
</div>
</div>
<Tooltip
id={`attachment-uid-${uid}-success`}
place="top"
delayShow={300}
className="allm-tooltip !allm-text-xs"
/>
</>
);
}
return (
<>
<div
@ -170,6 +213,10 @@ function displayFromFile(file) {
case "c":
case "c":
return { iconBgColor: "bg-warn", Icon: FileCode };
case "png":
case "jpg":
case "jpeg":
return { iconBgColor: "bg-royalblue", Icon: FileImage };
default:
return { iconBgColor: "bg-royalblue", Icon: FileText };
}

frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/index.jsx

@ -14,6 +14,7 @@ import TextSizeButton from "./TextSizeMenu";
import SpeechToText from "./SpeechToText";
import { Tooltip } from "react-tooltip";
import AttachmentManager from "./Attachments";
import AttachItem from "./AttachItem";
export const PROMPT_INPUT_EVENT = "set_prompt_input";
export default function PromptInput({
@ -161,6 +162,7 @@ export default function PromptInput({
</div>
<div className="flex justify-between py-3.5">
<div className="flex gap-x-2">
<AttachItem />
<SlashCommandsButton
showing={showSlashCommand}
setShowSlashCommand={setShowSlashCommand}

frontend/src/components/WorkspaceChat/ChatContainer/index.jsx

@ -1,6 +1,6 @@
import { useState, useEffect } from "react";
import { useState, useEffect, useContext } from "react";
import ChatHistory from "./ChatHistory";
import DnDFileUploadWrapper, { CLEAR_ATTACHMENTS_EVENT } from "./DnDWrapper";
import { CLEAR_ATTACHMENTS_EVENT, DndUploaderContext } from "./DnDWrapper";
import PromptInput, { PROMPT_INPUT_EVENT } from "./PromptInput";
import Workspace from "@/models/workspace";
import handleChat, { ABORT_STREAM_EVENT } from "@/utils/chat";
@ -13,6 +13,7 @@ import handleSocketResponse, {
AGENT_SESSION_END,
AGENT_SESSION_START,
} from "@/utils/chat/agent";
import DnDFileUploaderWrapper from "./DnDWrapper";
export default function ChatContainer({ workspace, knownHistory = [] }) {
const { threadSlug = null } = useParams();
@ -21,6 +22,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
const [chatHistory, setChatHistory] = useState(knownHistory);
const [socketId, setSocketId] = useState(null);
const [websocket, setWebsocket] = useState(null);
const { files, parseAttachments } = useContext(DndUploaderContext);
// Maintain state of message from whatever is in PromptInput
const handleMessageChange = (event) => {
@ -41,7 +43,11 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
if (!message || message === "") return false;
const prevChatHistory = [
...chatHistory,
{ content: message, role: "user" },
{
content: message,
role: "user",
attachments: parseAttachments(),
},
{
content: "",
role: "assistant",
@ -60,11 +66,23 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
const updatedHistory = chatHistory.slice(0, -1);
const lastUserMessage = updatedHistory.slice(-1)[0];
Workspace.deleteChats(workspace.slug, [chatId])
.then(() => sendCommand(lastUserMessage.content, true, updatedHistory))
.then(() =>
sendCommand(
lastUserMessage.content,
true,
updatedHistory,
lastUserMessage?.attachments
)
)
.catch((e) => console.error(e));
};
const sendCommand = async (command, submit = false, history = []) => {
const sendCommand = async (
command,
submit = false,
history = [],
attachments = []
) => {
if (!command || command === "") return false;
if (!submit) {
setMessageEmit(command);
@ -81,13 +99,18 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
role: "assistant",
pending: true,
userMessage: command,
attachments,
animate: true,
},
];
} else {
prevChatHistory = [
...chatHistory,
{ content: command, role: "user" },
{
content: command,
role: "user",
attachments,
},
{
content: "",
role: "assistant",
@ -123,7 +146,12 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
}
if (!promptMessage || !promptMessage?.userMessage) return false;
// If running and edit or regeneration, this history will already have attachments
// so no need to parse the current state.
const attachments = promptMessage?.attachments ?? parseAttachments();
window.dispatchEvent(new CustomEvent(CLEAR_ATTACHMENTS_EVENT));
await Workspace.multiplexStream({
workspaceSlug: workspace.slug,
threadSlug,
@ -137,6 +165,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
_chatHistory,
setSocketId
),
attachments,
});
return;
}
@ -218,31 +247,27 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
return (
<div
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline"
className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline no-scroll"
>
{isMobile && <SidebarMobileHeader />}
<DnDFileUploadWrapper workspace={workspace}>
{(files) => (
<>
<ChatHistory
history={chatHistory}
workspace={workspace}
sendCommand={sendCommand}
updateHistory={setChatHistory}
regenerateAssistantMessage={regenerateAssistantMessage}
hasAttachments={files.length > 0}
/>
<PromptInput
submit={handleSubmit}
onChange={handleMessageChange}
inputDisabled={loadingResponse}
buttonDisabled={loadingResponse}
sendCommand={sendCommand}
attachments={files}
/>
</>
)}
</DnDFileUploadWrapper>
<DnDFileUploaderWrapper>
<ChatHistory
history={chatHistory}
workspace={workspace}
sendCommand={sendCommand}
updateHistory={setChatHistory}
regenerateAssistantMessage={regenerateAssistantMessage}
hasAttachments={files.length > 0}
/>
<PromptInput
submit={handleSubmit}
onChange={handleMessageChange}
inputDisabled={loadingResponse}
buttonDisabled={loadingResponse}
sendCommand={sendCommand}
attachments={files}
/>
</DnDFileUploaderWrapper>
</div>
);
}
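With parseAttachments in place, a user turn pushed into chatHistory carries its images inline; an illustrative entry (contentString truncated):

const userTurn = {
  content: "What is in this image?",
  role: "user",
  attachments: [
    { name: "photo.jpg", mime: "image/jpeg", contentString: "data:image/jpeg;base64,/9j/4AAQ..." },
  ],
};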

frontend/src/components/WorkspaceChat/index.jsx

@ -5,6 +5,9 @@ import ChatContainer from "./ChatContainer";
import paths from "@/utils/paths";
import ModalWrapper from "../ModalWrapper";
import { useParams } from "react-router-dom";
import DnDFileUploaderWrapper, {
DnDFileUploaderProvider,
} from "./ChatContainer/DnDWrapper";
export default function WorkspaceChat({ loading, workspace }) {
const { threadSlug = null } = useParams();
@ -62,7 +65,11 @@ export default function WorkspaceChat({ loading, workspace }) {
}
setEventDelegatorForCodeSnippets();
return <ChatContainer workspace={workspace} knownHistory={history} />;
return (
<DnDFileUploaderProvider workspace={workspace}>
<ChatContainer workspace={workspace} knownHistory={history} />
</DnDFileUploaderProvider>
);
}
// Enables us to safely markdown and sanitize all responses without risk of injection

frontend/src/models/workspace.js

@ -115,16 +115,23 @@ const Workspace = {
threadSlug = null,
prompt,
chatHandler,
attachments = [],
}) {
if (!!threadSlug)
return this.threads.streamChat(
{ workspaceSlug, threadSlug },
prompt,
chatHandler
chatHandler,
attachments
);
return this.streamChat({ slug: workspaceSlug }, prompt, chatHandler);
return this.streamChat(
{ slug: workspaceSlug },
prompt,
chatHandler,
attachments
);
},
streamChat: async function ({ slug }, message, handleChat) {
streamChat: async function ({ slug }, message, handleChat, attachments = []) {
const ctrl = new AbortController();
// Listen for the ABORT_STREAM_EVENT key to be emitted by the client
@ -138,7 +145,7 @@ const Workspace = {
await fetchEventSource(`${API_BASE}/workspace/${slug}/stream-chat`, {
method: "POST",
body: JSON.stringify({ message }),
body: JSON.stringify({ message, attachments }),
headers: baseHeaders(),
signal: ctrl.signal,
openWhenHidden: true,
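The body posted to the stream-chat endpoint therefore gains an attachments array next to message; an illustrative payload (contentString truncated):

// JSON.stringify({ message, attachments }) produces a body like:
const body = {
  message: "Describe this image",
  attachments: [
    { name: "photo.jpg", mime: "image/jpeg", contentString: "data:image/jpeg;base64,/9j/4AAQ..." },
  ],
};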

frontend/src/models/workspaceThread.js

@ -90,7 +90,8 @@ const WorkspaceThread = {
streamChat: async function (
{ workspaceSlug, threadSlug },
message,
handleChat
handleChat,
attachments = []
) {
const ctrl = new AbortController();
@ -107,7 +108,7 @@ const WorkspaceThread = {
`${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/stream-chat`,
{
method: "POST",
body: JSON.stringify({ message }),
body: JSON.stringify({ message, attachments }),
headers: baseHeaders(),
signal: ctrl.signal,
openWhenHidden: true,

server/endpoints/chat.js

@ -27,7 +27,7 @@ function chatEndpoints(app) {
async (request, response) => {
try {
const user = await userFromSession(request, response);
const { message } = reqBody(request);
const { message, attachments = [] } = reqBody(request);
const workspace = response.locals.workspace;
if (!message?.length) {
@ -88,13 +88,16 @@ function chatEndpoints(app) {
workspace,
message,
workspace?.chatMode,
user
user,
null,
attachments
);
await Telemetry.sendTelemetry("sent_chat", {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "lancedb",
multiModal: Array.isArray(attachments) && attachments?.length !== 0,
});
await EventLogs.logEvent(
@ -131,7 +134,7 @@ function chatEndpoints(app) {
async (request, response) => {
try {
const user = await userFromSession(request, response);
const { message } = reqBody(request);
const { message, attachments = [] } = reqBody(request);
const workspace = response.locals.workspace;
const thread = response.locals.thread;
@ -196,7 +199,8 @@ function chatEndpoints(app) {
message,
workspace?.chatMode,
user,
thread
thread,
attachments
);
// If thread was renamed emit event to frontend via special `action` response.
@ -221,6 +225,7 @@ function chatEndpoints(app) {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "lancedb",
multiModal: Array.isArray(attachments) && attachments?.length !== 0,
});
await EventLogs.logEvent(

server/utils/AiProviders/anthropic/index.js

@ -66,18 +66,50 @@ class AnthropicLLM {
return validModels.includes(modelName);
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image",
source: {
type: "base64",
media_type: attachment.mime,
data: attachment.contentString.split("base64,")[1],
},
});
}
return content.flat();
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [], // This is the specific attachment for only this prompt
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
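For a prompt with one attached image, #generateContent above yields an Anthropic-style content array; an illustrative result (base64 truncated):

// Note the data URL prefix is stripped for Anthropic, which wants raw base64:
[
  { type: "text", text: "What is in this image?" },
  {
    type: "image",
    source: { type: "base64", media_type: "image/jpeg", data: "/9j/4AAQSkZJRg..." },
  },
];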

server/utils/AiProviders/bedrock/index.js

@ -95,21 +95,60 @@ class AWSBedrockLLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
 * @returns {{content: string|object[]}}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return { content: userPrompt };
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: attachment.contentString,
});
}
return { content: content.flat() };
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
// AWS Mistral models do not support system prompts
if (this.model.startsWith("mistral"))
return [...chatHistory, { role: "user", content: userPrompt }];
return [
...chatHistory,
{
role: "user",
...this.#generateContent({ userPrompt, attachments }),
},
];
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
...this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
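Unlike the providers whose helper returns a bare content value, the Bedrock helper returns an object so it can be spread into the message; an illustrative spread result:

// { role: "user", ...this.#generateContent({ userPrompt, attachments }) } becomes:
({
  role: "user",
  content: [
    { type: "text", text: "What is in this image?" },
    { type: "image_url", image_url: "data:image/jpeg;base64,/9j/4AAQ..." }, // full data URL kept
  ],
});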

server/utils/AiProviders/gemini/index.js

@ -112,11 +112,34 @@ class GeminiLLM {
return validModels.includes(modelName);
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ text: userPrompt }];
for (let attachment of attachments) {
content.push({
inlineData: {
data: attachment.contentString.split("base64,")[1],
mimeType: attachment.mime,
},
});
}
return content.flat();
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
@ -126,7 +149,10 @@ class GeminiLLM {
prompt,
{ role: "assistant", content: "Okay." },
...chatHistory,
{ role: "USER_PROMPT", content: userPrompt },
{
role: "USER_PROMPT",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
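Gemini takes inline data parts instead of image_url objects, again with the data URL prefix stripped; an illustrative content array from #generateContent (base64 truncated):

[
  { text: "What is in this image?" },
  { inlineData: { data: "/9j/4AAQSkZJRg...", mimeType: "image/jpeg" } },
];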

server/utils/AiProviders/koboldCPP/index.js

@ -66,17 +66,52 @@ class KoboldCPPLLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/liteLLM/index.js

@ -65,17 +65,52 @@ class LiteLLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/lmStudio/index.js

@ -63,17 +63,53 @@ class LMStudioLLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
detail: "auto",
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/localAi/index.js

@ -53,17 +53,52 @@ class LocalAiLLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/ollama/index.js

@ -90,17 +90,50 @@ class OllamaAILLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
 * @returns {{content: string|object[]}}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return { content: userPrompt };
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: attachment.contentString,
});
}
return { content: content.flat() };
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
...this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/openAi/index.js

@ -73,17 +73,53 @@ class OpenAiLLM {
return !!model;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
detail: "high",
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [], // This is the specific attachment for only this prompt
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
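OpenAI and the OpenAI-compatible providers in this commit (KoboldCPP, LiteLLM, LMStudio, LocalAI, OpenRouter, TextGenWebUI) all emit the same image_url content shape, differing only in the detail hint; an illustrative array (data URL truncated):

[
  { type: "text", text: "What is in this image?" },
  {
    type: "image_url",
    // detail is "high" for OpenAI, "auto" for LMStudio/OpenRouter, omitted elsewhere
    image_url: { url: "data:image/jpeg;base64,/9j/4AAQ...", detail: "high" },
  },
];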

server/utils/AiProviders/openRouter/index.js

@ -128,17 +128,49 @@ class OpenRouterLLM {
return availableModels.hasOwnProperty(model);
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
detail: "auto",
},
});
}
return content.flat();
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/AiProviders/textGenWebUI/index.js

@ -63,17 +63,52 @@ class TextGenWebUILLM {
return true;
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ type: "text", text: userPrompt }];
for (let attachment of attachments) {
content.push({
type: "image_url",
image_url: {
url: attachment.contentString,
},
});
}
return content.flat();
}
/**
* Construct the user prompt for this model.
* @param {{attachments: import("../../helpers").Attachment[]}} param0
* @returns
*/
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
return [
prompt,
...chatHistory,
{
role: "user",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {

server/utils/chats/stream.js

@ -20,7 +20,8 @@ async function streamChatWithWorkspace(
message,
chatMode = "chat",
user = null,
thread = null
thread = null,
attachments = []
) {
const uuid = uuidv4();
const updatedMessage = await grepCommand(message, user);
@ -69,6 +70,7 @@ async function streamChatWithWorkspace(
type: "textResponse",
textResponse,
sources: [],
attachments,
close: true,
error: null,
});
@ -79,6 +81,7 @@ async function streamChatWithWorkspace(
text: textResponse,
sources: [],
type: chatMode,
attachments,
},
threadId: thread?.id || null,
include: false,
@ -195,6 +198,7 @@ async function streamChatWithWorkspace(
text: textResponse,
sources: [],
type: chatMode,
attachments,
},
threadId: thread?.id || null,
include: false,
@ -211,6 +215,7 @@ async function streamChatWithWorkspace(
userPrompt: updatedMessage,
contextTexts,
chatHistory,
attachments,
},
rawHistory
);
@ -246,7 +251,7 @@ async function streamChatWithWorkspace(
const { chat } = await WorkspaceChats.new({
workspaceId: workspace.id,
prompt: message,
response: { text: completeText, sources, type: chatMode },
response: { text: completeText, sources, type: chatMode, attachments },
threadId: thread?.id || null,
user,
});
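The attachments ride along in the stored chat's response JSON, which is how edits and reloads can re-render them; an illustrative stored record (values truncated):

// WorkspaceChats.new({ ..., response: { text, sources, type, attachments } })
({
  text: "The image shows a small rubber duck.",
  sources: [],
  type: "chat",
  attachments: [{ name: "photo.jpg", mime: "image/jpeg", contentString: "data:image/jpeg;base64,..." }],
});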

server/utils/helpers/chat/responses.js

@ -71,6 +71,7 @@ function convertToChatHistory(history = []) {
role: "user",
content: prompt,
sentAt: moment(createdAt).unix(),
attachments: data?.attachments ?? [],
chatId: id,
},
{

server/utils/helpers/index.js

@ -1,3 +1,11 @@
/**
* File Attachment for automatic upload on the chat container page.
* @typedef Attachment
* @property {string} name - the given file name
* @property {string} mime - the given file mime
* @property {string} contentString - full base64 encoded string of file
*/
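An illustrative object satisfying this typedef (base64 truncated):

/** @type {Attachment} */
const attachment = {
  name: "photo.jpg",
  mime: "image/jpeg",
  contentString: "data:image/jpeg;base64,/9j/4AAQSkZJRg...",
};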
/**
* @typedef {Object} BaseLLMProvider - A basic llm provider object
* @property {Function} streamingEnabled - Checks if streaming is enabled for chat completions.