add embedding engine to telem

timothycarambat 2023-12-07 08:53:37 -08:00
parent ef79649891
commit 33de34f8dc
4 changed files with 7 additions and 0 deletions
Changed directories: server/endpoints, server/models
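
Every hunk below adds the same Embedder field to the telemetry metadata, read from the EMBEDDING_ENGINE environment variable with an "inherit" fallback when no embedder is configured. As a rough illustration (not part of the commit), the metadata attached to these events now has the following shape; concrete values depend on each instance's environment:

// Illustrative only: the shape of the telemetry metadata after this commit.
// Field values are read from the environment, with the fallbacks used in the diff below.
const exampleMetadata = {
  multiUserMode: false, // only included by the endpoints that pass it
  LLMSelection: process.env.LLM_PROVIDER || "openai",
  Embedder: process.env.EMBEDDING_ENGINE || "inherit",
  VectorDbSelection: process.env.VECTOR_DB || "pinecone",
};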


@@ -65,6 +65,7 @@ function apiWorkspaceEndpoints(app) {
     await Telemetry.sendTelemetry("workspace_created", {
       multiUserMode: multiUserMode(response),
       LLMSelection: process.env.LLM_PROVIDER || "openai",
+      Embedder: process.env.EMBEDDING_ENGINE || "inherit",
       VectorDbSelection: process.env.VECTOR_DB || "pinecone",
     });
     response.status(200).json({ workspace, message });
@@ -488,6 +489,7 @@ function apiWorkspaceEndpoints(app) {
     const result = await chatWithWorkspace(workspace, message, mode);
     await Telemetry.sendTelemetry("sent_chat", {
       LLMSelection: process.env.LLM_PROVIDER || "openai",
+      Embedder: process.env.EMBEDDING_ENGINE || "inherit",
       VectorDbSelection: process.env.VECTOR_DB || "pinecone",
     });
     response.status(200).json({ ...result });


@@ -77,6 +77,7 @@ function chatEndpoints(app) {
     await Telemetry.sendTelemetry("sent_chat", {
       multiUserMode: multiUserMode(response),
       LLMSelection: process.env.LLM_PROVIDER || "openai",
+      Embedder: process.env.EMBEDDING_ENGINE || "inherit",
       VectorDbSelection: process.env.VECTOR_DB || "pinecone",
     });
     response.end();
@@ -154,6 +155,7 @@ function chatEndpoints(app) {
       {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
+        Embedder: process.env.EMBEDDING_ENGINE || "inherit",
         VectorDbSelection: process.env.VECTOR_DB || "pinecone",
       },
       user?.id


@@ -32,6 +32,7 @@ function workspaceEndpoints(app) {
       {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
+        Embedder: process.env.EMBEDDING_ENGINE || "inherit",
         VectorDbSelection: process.env.VECTOR_DB || "pinecone",
       },
       user?.id


@@ -74,6 +74,7 @@ const Document = {
     await Telemetry.sendTelemetry("documents_embedded_in_workspace", {
       LLMSelection: process.env.LLM_PROVIDER || "openai",
+      Embedder: process.env.EMBEDDING_ENGINE || "inherit",
       VectorDbSelection: process.env.VECTOR_DB || "pinecone",
     });
     return { failed: failedToEmbed, embedded };
@@ -105,6 +106,7 @@ const Document = {
     await Telemetry.sendTelemetry("documents_removed_in_workspace", {
       LLMSelection: process.env.LLM_PROVIDER || "openai",
+      Embedder: process.env.EMBEDDING_ENGINE || "inherit",
       VectorDbSelection: process.env.VECTOR_DB || "pinecone",
     });
     return true;
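
All seven additions repeat the same three provider fields. A minimal sketch of how that shared metadata could be centralized, assuming a hypothetical helper name (baseTelemetryMeta) that does not exist in this commit:

// Hypothetical helper (not part of this commit) that collects the metadata
// fields repeated across the telemetry calls above. The field names and
// fallback values mirror the diff exactly.
function baseTelemetryMeta() {
  return {
    LLMSelection: process.env.LLM_PROVIDER || "openai",
    Embedder: process.env.EMBEDDING_ENGINE || "inherit",
    VectorDbSelection: process.env.VECTOR_DB || "pinecone",
  };
}

// Example usage mirroring one of the calls in the diff:
// await Telemetry.sendTelemetry("sent_chat", {
//   multiUserMode: multiUserMode(response),
//   ...baseTelemetryMeta(),
// });

Spreading one shared object would keep event-specific fields such as multiUserMode at each call site while the provider selections live in a single place.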