make similarityResponse object arguments and not positional ()

* make `similarityResponse` object arguments and not positional

* reuse client for qdrant
Timothy Carambat 2025-01-02 12:03:26 -08:00 committed by GitHub
parent b39f60abfb
commit bb5c3b7e0d
9 changed files with 60 additions and 60 deletions
server/utils
  helpers/chat
  vectorDbProviders
    astra
    chroma
    lance
    milvus
    pinecone
    qdrant
    weaviate
    zilliz
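
All nine files apply the change described in the commit title: each provider's `similarityResponse` now takes a single destructured options object rather than positional arguments. A minimal standalone sketch of the pattern (illustrative only; the real implementations query their respective vector stores):

// Before: positional parameters. Callers must keep the argument order in
// sync and spell out every earlier optional value to reach a later one.
async function similarityResponsePositional(
  client,
  namespace,
  queryVector,
  similarityThreshold = 0.25,
  topN = 4,
  filterIdentifiers = []
) {
  return { namespace, similarityThreshold, topN, filterIdentifiers };
}

// After: one destructured object argument. Fields are named, order does not
// matter, and omitted fields fall back to their defaults.
async function similarityResponseObject({
  client,
  namespace,
  queryVector,
  similarityThreshold = 0.25,
  topN = 4,
  filterIdentifiers = [],
}) {
  return { namespace, similarityThreshold, topN, filterIdentifiers };
}

// Call site with the object form: override only what you need.
similarityResponseObject({
  client: {},                    // placeholder client for the sketch
  namespace: "workspace-1",
  queryVector: [0.1, 0.2, 0.3],
  topN: 8,                       // similarityThreshold and filterIdentifiers keep defaults
}).then(console.log);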

server/utils/helpers/chat

@@ -371,7 +371,7 @@ function cannonball({
  *
  * @param {Object} config - params to call
  * @param {object} config.nDocs = fill size of the window
- * @param {object} config.searchResults = vector similarityResponse results for .sources
+ * @param {object} config.searchResults = vector `similarityResponse` results for .sources
  * @param {object[]} config.history - rawHistory of chat containing sources
  * @param {string[]} config.filterIdentifiers - Pinned document identifiers to prevent duplicate context
  * @returns {{
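
The doc comment above describes a single `config` object. A hypothetical call matching that shape might look like the following; the function name (`fillSourceWindow`) and the literal values are assumptions for illustration, and only the field names come from the JSDoc:

// Hypothetical call matching the documented config shape; the function name
// and values are placeholders, not taken from the diff.
const windowed = fillSourceWindow({
  nDocs: 4,               // fill size of the window
  searchResults: [],      // vector similarityResponse results for .sources
  history: [],            // raw chat history containing sources
  filterIdentifiers: [],  // pinned document identifiers to prevent duplicate context
});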

server/utils/vectorDbProviders/astra

@@ -269,14 +269,14 @@ const AstraDB = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };
@@ -287,14 +287,14 @@ const AstraDB = {
 message: false,
 };
 },
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],

server/utils/vectorDbProviders/chroma

@@ -108,14 +108,14 @@ const Chroma = {
 const namespace = await this.namespace(client, this.normalize(_namespace));
 return namespace?.vectorCount || 0;
 },
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const collection = await client.getCollection({
 name: this.normalize(namespace),
 });
@@ -356,14 +356,14 @@ const Chroma = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { metadata: { ...metadata, text: contextTexts[i] } };

server/utils/vectorDbProviders/lance

@@ -59,22 +59,23 @@ const LanceDb = {
 },
 /**
  * Performs a SimilaritySearch on a give LanceDB namespace.
- * @param {LanceClient} client
- * @param {string} namespace
- * @param {number[]} queryVector
- * @param {number} similarityThreshold
- * @param {number} topN
- * @param {string[]} filterIdentifiers
+ * @param {Object} params
+ * @param {LanceClient} params.client
+ * @param {string} params.namespace
+ * @param {number[]} params.queryVector
+ * @param {number} params.similarityThreshold
+ * @param {number} params.topN
+ * @param {string[]} params.filterIdentifiers
  * @returns
  */
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const collection = await client.openTable(namespace);
 const result = {
 contextTexts: [],
@@ -313,14 +314,14 @@ const LanceDb = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { metadata: { ...metadata, text: contextTexts[i] } };
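
The LanceDB hunk also updates the JSDoc to the usual convention for documenting a destructured parameter: one `@param {Object} params` entry for the object itself, then one `params.field` entry per property. A small generic example of that convention with a default value (not taken from the repo):

/**
 * @param {Object} params
 * @param {string} params.namespace - collection to search
 * @param {number[]} params.queryVector - embedding to compare against
 * @param {number} [params.topN=4] - number of results to return
 */
async function search({ namespace, queryVector, topN = 4 }) {
  return { namespace, dims: queryVector.length, topN };
}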

server/utils/vectorDbProviders/milvus

@@ -312,14 +312,14 @@ const Milvus = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };
@@ -330,14 +330,14 @@ const Milvus = {
 message: false,
 };
 },
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],

server/utils/vectorDbProviders/pinecone

@@ -36,21 +36,21 @@ const PineconeDB = {
 const namespace = await this.namespace(pineconeIndex, _namespace);
 return namespace?.recordCount || 0;
 },
-similarityResponse: async function (
-index,
+similarityResponse: async function ({
+client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],
 scores: [],
 };
-const pineconeNamespace = index.namespace(namespace);
+const pineconeNamespace = client.namespace(namespace);
 const response = await pineconeNamespace.query({
 vector: queryVector,
 topK: topN,
@@ -254,14 +254,14 @@ const PineconeDB = {
 );
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
-pineconeIndex,
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
+client: pineconeIndex,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };
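
Besides the switch to object arguments, the Pinecone hunks rename the first parameter from `index` to `client`, so every provider exposes the same field name. The call site then passes the Pinecone index handle under that shared key, roughly as below (identifiers as they appear in the hunks above; sketch of the call shape only, not a self-contained snippet):

// The Pinecone index handle is now supplied under the generic `client` key
// used by the other providers (previously the positional `index` argument).
const { contextTexts, sourceDocuments } = await PineconeDB.similarityResponse({
  client: pineconeIndex,
  namespace,
  queryVector,
  similarityThreshold,
  topN,
  filterIdentifiers,
});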

server/utils/vectorDbProviders/qdrant

@@ -47,15 +47,14 @@ const QDrant = {
 const namespace = await this.namespace(client, _namespace);
 return namespace?.vectorCount || 0;
 },
-similarityResponse: async function (
-_client,
+similarityResponse: async function ({
+client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
-const { client } = await this.connect();
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],
@@ -338,14 +337,14 @@ const QDrant = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };
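
The first Qdrant hunk also implements the second line of the commit message, "reuse client for qdrant": the old code ignored its `_client` argument and opened a fresh connection with `this.connect()` on every search, whereas the new code uses the client it is handed. A small standalone sketch of that style (the `connect`/`search` shapes here are assumptions, not the real Qdrant client API):

// Sketch only: the caller owns one connected client and passes it to each
// search, instead of the provider reconnecting inside every call.
function makeProvider(connect) {
  return {
    // Old shape: the passed-in handle was ignored and connect() ran per call.
    async searchReconnecting(_client, queryVector) {
      const client = await connect();
      return client.search(queryVector);
    },
    // New shape: the already-connected client arrives via the options object.
    async searchReusing({ client, queryVector }) {
      return client.search(queryVector);
    },
  };
}

The call site in the second Qdrant hunk already holds a connected `client`, so it simply forwards it.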

server/utils/vectorDbProviders/weaviate

@@ -74,14 +74,14 @@ const Weaviate = {
 return 0;
 }
 },
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],
@@ -381,14 +381,14 @@ const Weaviate = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };

server/utils/vectorDbProviders/zilliz

@@ -305,14 +305,14 @@ const Zilliz = {
 }
 const queryVector = await LLMConnector.embedTextInput(input);
-const { contextTexts, sourceDocuments } = await this.similarityResponse(
+const { contextTexts, sourceDocuments } = await this.similarityResponse({
 client,
 namespace,
 queryVector,
 similarityThreshold,
 topN,
-filterIdentifiers
-);
+filterIdentifiers,
+});
 const sources = sourceDocuments.map((metadata, i) => {
 return { ...metadata, text: contextTexts[i] };
@@ -323,14 +323,14 @@ const Zilliz = {
 message: false,
 };
 },
-similarityResponse: async function (
+similarityResponse: async function ({
 client,
 namespace,
 queryVector,
 similarityThreshold = 0.25,
 topN = 4,
-filterIdentifiers = []
-) {
+filterIdentifiers = [],
+}) {
 const result = {
 contextTexts: [],
 sourceDocuments: [],