mirror of https://github.com/Mintplex-Labs/anything-llm.git
synced 2025-03-13 05:32:24 +00:00
Compare commits
3 commits: 7fb441d0ae ... 3d9564688b
Author | SHA1 | Date
---|---|---
 | 3d9564688b |
 | 0e7fee41ca |
 | 727a54f4e5 |
10 changed files with 182 additions and 11 deletions
@@ -11,7 +11,7 @@ export default function AgentFlowsList({
       <div className="text-theme-text-secondary text-center text-xs flex flex-col gap-y-2">
         <p>No agent flows found</p>
         <a
-          href="https://docs.anythingllm.com/agent-flows/create-your-first-flow"
+          href="https://docs.anythingllm.com/agent-flows/getting-started"
           target="_blank"
           className="text-theme-text-secondary underline hover:text-cta-button"
         >
@@ -11,10 +11,29 @@ const WorkspaceChats = {
     apiSessionId = null,
   }) {
     try {
+      let promptString;
+      if (typeof prompt === "string" || prompt instanceof String) {
+        promptString = prompt;
+      } else if (Array.isArray(prompt)) {
+        promptString = prompt
+          .map((x) => {
+            if (typeof x.text === "string" || x.text instanceof String) {
+              return x.text;
+            } else if (typeof x === "string" || x instanceof String) {
+              return x;
+            } else {
+              return JSON.stringify(x);
+            }
+          })
+          .join("\n\n");
+      } else {
+        promptString = JSON.stringify(prompt);
+      }
+
       const chat = await prisma.workspace_chats.create({
         data: {
           workspaceId,
-          prompt,
+          prompt: promptString,
           response: JSON.stringify(response),
           user_id: user?.id || null,
           thread_id: threadId,
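The block added above flattens a chat prompt that may arrive either as a plain string or as an array of message parts (objects carrying a `text` field) into a single string before it is written to `workspace_chats`. Below is a minimal standalone sketch of the same behavior; the helper name `normalizePrompt` is chosen here for illustration only, since the commit inlines the logic rather than extracting a helper:

```js
// Hypothetical helper mirroring the normalization added in the diff above.
// Accepts a string, an array of strings and/or { text } parts, or any other
// value, and always returns a single string suitable for persisting.
function normalizePrompt(prompt) {
  if (typeof prompt === "string" || prompt instanceof String) return prompt;
  if (Array.isArray(prompt)) {
    return prompt
      .map((x) => {
        if (typeof x?.text === "string" || x?.text instanceof String) return x.text;
        if (typeof x === "string" || x instanceof String) return x;
        return JSON.stringify(x); // unknown part shapes fall back to a JSON dump
      })
      .join("\n\n");
  }
  return JSON.stringify(prompt);
}

// Illustrative calls:
normalizePrompt("hello");                                       // "hello"
normalizePrompt([{ text: "part one" }, { text: "part two" }]);  // "part one\n\npart two"
normalizePrompt({ role: "user" });                              // '{"role":"user"}'
```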
@@ -268,7 +268,26 @@ const AstraDB = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,
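The hunk above, and each of the vector-database hunks that follow (Chroma, LanceDb, Milvus, PineconeDB, QDrant, Weaviate, Zilliz), guard the query `input` the same way: a string passes through, an array is reduced to its string elements and `text` fields, and anything else short-circuits with an empty result before `LLMConnector.embedTextInput` is called. A hedged sketch of that repeated shape as one shared helper follows; the name `coerceQueryInput` and the `{ ok, textInput }` return convention are illustrative and not part of the commit, which inlines a copy of the block in every provider:

```js
// Hypothetical consolidation of the block repeated across the providers below.
// Returns the text to embed (a string or an array of strings), or ok: false
// when the input type is unsupported so the caller can return its empty result.
function coerceQueryInput(input) {
  if (typeof input === "string" || input instanceof String) {
    return { ok: true, textInput: input };
  }
  if (Array.isArray(input)) {
    const textInput = [];
    for (const x of input) {
      if (typeof x?.text === "string" || x?.text instanceof String) {
        textInput.push(x.text); // message-part object with a text field
      } else if (typeof x === "string" || x instanceof String) {
        textInput.push(x); // plain string element
      }
      // anything else is silently dropped, matching the inlined version
    }
    return { ok: true, textInput };
  }
  return { ok: false, textInput: null };
}
```

Keeping the logic inlined per provider, as the commit does, avoids touching shared utilities, at the cost of several identical copies to maintain.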
@@ -355,7 +355,26 @@ const Chroma = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,
@@ -400,12 +400,31 @@ const LanceDb = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const result = rerank
       ? await this.rerankedSimilarityResponse({
           client,
           namespace,
-          query: input,
+          query: textInput,
           queryVector,
           similarityThreshold,
           topN,
@@ -311,7 +311,26 @@ const Milvus = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,
@@ -253,7 +253,26 @@ const PineconeDB = {
       "Invalid namespace - has it been collected and populated yet?"
     );
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client: pineconeIndex,
       namespace,
@@ -336,7 +336,26 @@ const QDrant = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,
@@ -380,7 +380,26 @@ const Weaviate = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,
@@ -304,7 +304,26 @@ const Zilliz = {
       };
     }
 
-    const queryVector = await LLMConnector.embedTextInput(input);
+    let textInput = "";
+    if (typeof input === "string" || input instanceof String) {
+      textInput = input;
+    } else if (Array.isArray(input)) {
+      textInput = [];
+      for (const x of input) {
+        if (typeof x.text === "string" || x.text instanceof String) {
+          textInput.push(x.text);
+        } else if (typeof x === "string" || x instanceof String) {
+          textInput.push(x);
+        }
+      }
+    } else {
+      return {
+        contextTexts: [],
+        sources: [],
+        message: "Invalid query - the type of input is not string or array !",
+      };
+    }
+    const queryVector = await LLMConnector.embedTextInput(textInput);
     const { contextTexts, sourceDocuments } = await this.similarityResponse({
       client,
       namespace,