Mirror of https://github.com/Mintplex-Labs/anything-llm.git

parent 2914c09dd5
commit 2b17bf26a8
4 changed files with 31 additions and 13 deletions

server

@@ -64,11 +64,15 @@ function chatEndpoints(app) {
       }
       const result = await chatWithWorkspace(workspace, message, mode, user);
-      await Telemetry.sendTelemetry("sent_chat", {
-        multiUserMode: multiUserMode(response),
-        LLMSelection: process.env.LLM_PROVIDER || "openai",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
-      });
+      await Telemetry.sendTelemetry(
+        "sent_chat",
+        {
+          multiUserMode: multiUserMode(response),
+          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        },
+        user?.id
+      );
       response.status(200).json({ ...result });
     } catch (e) {
       console.error(e);

@@ -134,6 +134,11 @@ function systemEndpoints(app) {
         return;
       }

+      await Telemetry.sendTelemetry(
+        "login_event",
+        { multiUserMode: false },
+        existingUser?.id
+      );
       response.status(200).json({
         valid: true,
         user: existingUser,

@@ -155,6 +160,7 @@ function systemEndpoints(app) {
         return;
       }

+      await Telemetry.sendTelemetry("login_event", { multiUserMode: false });
       response.status(200).json({
         valid: true,
         token: makeJWT({ p: password }, "30d"),

@@ -325,7 +331,9 @@ function systemEndpoints(app) {
         true
       );
       if (process.env.NODE_ENV === "production") await dumpENV();
-      await Telemetry.sendTelemetry("enabled_multi_user_mode");
+      await Telemetry.sendTelemetry("enabled_multi_user_mode", {
+        multiUserMode: true,
+      });
       response.status(200).json({ success: !!user, error });
     } catch (e) {
       await User.delete({});

@@ -23,11 +23,15 @@ function workspaceEndpoints(app) {
       const user = await userFromSession(request, response);
       const { name = null, onboardingComplete = false } = reqBody(request);
       const { workspace, message } = await Workspace.new(name, user?.id);
-      await Telemetry.sendTelemetry("workspace_created", {
-        multiUserMode: multiUserMode(response),
-        LLMSelection: process.env.LLM_PROVIDER || "openai",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
-      });
+      await Telemetry.sendTelemetry(
+        "workspace_created",
+        {
+          multiUserMode: multiUserMode(response),
+          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        },
+        user?.id
+      );
       if (onboardingComplete === true)
         await Telemetry.sendTelemetry("onboarding_complete");
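
Every endpoint hunk above makes the same change: the telemetry call gains an optional trailing argument carrying the requesting user's id, while single-user and anonymous paths keep the two-argument form. A minimal sketch of that call shape, with the Telemetry helper stubbed so the snippet runs on its own; the event name, property values, and stub are placeholders, not part of the diff (the real helper is the one changed in the hunk that follows):

// Stand-in for the real Telemetry helper; the stub only echoes its arguments
// so the call shape is visible.
const Telemetry = {
  sendTelemetry: async (event, properties = {}, subUserId = null) =>
    console.log({ event, properties, subUserId }),
};

(async () => {
  const user = { id: 42 }; // placeholder for the user resolved from the session
  await Telemetry.sendTelemetry(
    "sent_chat",
    {
      multiUserMode: true,
      LLMSelection: process.env.LLM_PROVIDER || "openai",
      VectorDbSelection: process.env.VECTOR_DB || "pinecone",
    },
    user?.id // new third argument; omitted entirely on single-user paths
  );
})();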

@@ -28,12 +28,14 @@ const Telemetry = {
     return new PostHog(this.pubkey);
   },

-  sendTelemetry: async function (event, properties = {}) {
+  sendTelemetry: async function (event, properties = {}, subUserId = null) {
     try {
-      const { client, distinctId } = await this.connect();
+      const { client, distinctId: systemId } = await this.connect();
       if (!client) return;
+      const distinctId = !!subUserId ? `${systemId}::${subUserId}` : systemId;
       console.log(`\x1b[32m[TELEMETRY SENT]\x1b[0m`, {
         event,
         distinctId,
         properties,
       });
       client.capture({
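
The last hunk is cut off at client.capture(. Below is a hedged, self-contained reconstruction of how the updated sendTelemetry plausibly reads end to end, with connect() and the PostHog client stubbed so it runs standalone. Only the function signature, the distinctId composition, and the log line come from the diff; the stub, the capture payload shape, and the silent catch are assumptions for illustration.

// Hedged sketch, not the repository's actual file: PostHog is replaced by a
// console-backed stub so the sub-user attribution is observable.
const Telemetry = {
  // Assumed stand-in for the real connect(): returns a client plus the
  // instance-wide distinct id.
  connect: async function () {
    return {
      client: { capture: (payload) => console.log("capture:", payload) },
      distinctId: "system-uuid",
    };
  },

  sendTelemetry: async function (event, properties = {}, subUserId = null) {
    try {
      const { client, distinctId: systemId } = await this.connect();
      if (!client) return;
      // From the diff: a sub-user id is namespaced under the system id.
      const distinctId = !!subUserId ? `${systemId}::${subUserId}` : systemId;
      console.log(`\x1b[32m[TELEMETRY SENT]\x1b[0m`, { event, distinctId, properties });
      client.capture({ distinctId, event, properties }); // payload shape assumed
    } catch {
      return; // assumption: telemetry failures never surface to the caller
    }
  },
};

(async () => {
  // Same event, attributed to the instance vs. to sub-user 42.
  await Telemetry.sendTelemetry("login_event", { multiUserMode: false });
  await Telemetry.sendTelemetry("login_event", { multiUserMode: false }, 42);
  // -> distinctId "system-uuid" and "system-uuid::42" respectively.
})();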