Compare commits


16 commits

Author SHA1 Message Date
MrMarans
6cc30bfacf
Merge ed6a5346df into 741ba8f2d0 2025-01-28 16:49:17 -05:00
Timothy Carambat
741ba8f2d0
Agent UI animations ()
* wip agent ui animation

* WIP agent ui revision

* linting

* simplify css

* memoize agent responses

* patch hook memo issue

* dev build

---------

Co-authored-by: shatfield4 <seanhatfield5@gmail.com>
2025-01-28 13:46:59 -08:00
timothycarambat
6192080635 Update NVIDIA branding 2025-01-28 10:27:31 -08:00
timothycarambat
4bdd921c75 revert ubuntu arm runner - fickle and fails often 2025-01-27 17:35:30 -08:00
Timothy Carambat
6aa1854155
Add ability to disable default agent skills ()
* Add ability to disable default agent skills

* debug build
2025-01-27 16:52:43 -08:00
Sean Hatfield
75790e7e90
Remove native LLM option ()
* remove native llm

* remove node-llama-cpp from dockerfile

* remove unneeded items from dockerfile

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2025-01-27 13:42:52 -08:00
Timothy Carambat
c56d3b1558
breakout latex plugin for delims ()
* Breakout LaTeX plugin for modification

* backport regular markdown link
2025-01-27 13:35:58 -08:00
Timothy Carambat
55ffc08867
[Chore] Add better data-handling for unknown providers ()
Add better data-handling for unknown providers
2025-01-27 10:50:20 -08:00
Jason
c757c3fb5f
feat: update novita AI logo and default model () 2025-01-27 08:41:12 -08:00
Sean Hatfield
22d5891ccc
Allow editing of LLM settings from workspace settings ()
* wip change workspace llm settings

* allow editing of workspace llm and agent config inside workspace settings

* lint + put back deleted comment

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2025-01-24 14:17:52 -08:00
Timothy Carambat
2ca22abc9c
Add Version to AzureOpenAI () 2025-01-24 13:41:37 -08:00
Sean Hatfield
48dcb22b25
Dynamic fetching of TogetherAI models ()
* implement dynamic fetching of togetherai models

* implement caching for togetherai models

* update gitignore for togetherai model caching

* Remove models.json from git tracking

* Remove .cached_at from git tracking

* lint

* revert unneeded change

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2025-01-24 11:06:59 -08:00
timothycarambat
273d116586 linting 2025-01-23 16:43:18 -08:00
Sean Hatfield
57f4f46a39
Bump perplexity models ()
* bump perplexity models

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2025-01-23 16:35:38 -08:00
timothycarambat
9584a7e140 Revert StatusResponse Changes due to crashing frontend 2025-01-23 12:36:35 -08:00
Ron Metzger
ed6a5346df changed language support for the following windows: chat, data-upload and account settings 2025-01-23 13:27:54 +01:00
84 changed files with 3600 additions and 2300 deletions
.github/workflows
BARE_METAL.md
docker
frontend
package.json
src
components
LLMSelection
NativeLLMOptions
NvidiaNimOptions
TogetherAiOptions
Modals/ManageWorkspace
DataConnectors
Connectors
Confluence
Github
Gitlab
WebsiteDepth
Youtube
index.jsx
Documents
Directory
UploadFile
WorkspaceDirectory
index.jsx
UserMenu/AccountModal
WorkspaceChat/ChatContainer
ChatHistory
PromptInput
AgentMenu
AttachItem
SlashCommands
SpeechToText
TextSizeMenu
index.jsx
hooks
locales
media/llmprovider
pages
Admin/Agents
GeneralSettings
LLMPreference
PrivacyAndData
OnboardingFlow/Steps
DataHandling
LLMPreference
WorkspaceSettings
AgentConfig/AgentLLMSelection/AgentLLMItem
ChatSettings/WorkspaceLLMSelection
ChatModelSelection
WorkspaceLLMItem
index.jsx
utils/chat
yarn.lock
server

View file

@ -11,7 +11,7 @@ on:
jobs:
push_multi_platform_to_registries:
name: Push Docker multi-platform image to multiple registries
runs-on: ubuntu-24.04-arm
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
@ -32,6 +32,9 @@ jobs:
fi
id: dockerhub
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

View file

@ -1,6 +1,9 @@
# This Github action is for publishing of the primary image for AnythingLLM
# It will publish a linux/amd64 and linux/arm64 image at the same time
# This file should ONLY BY USED FOR `master` BRANCH.
# TODO: Github now has an ubuntu-24.04-arm64 runner, but we still need
# to use QEMU to build the arm64 image because Chromium is not available for Linux arm64
# so builds will still fail, or fail much more often. Its inconsistent and frustrating.
name: Publish AnythingLLM Primary Docker image (amd64/arm64)
concurrency:
@ -25,7 +28,7 @@ on:
jobs:
push_multi_platform_to_registries:
name: Push Docker multi-platform image to multiple registries
runs-on: ubuntu-24.04-arm
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
@ -46,6 +49,9 @@ jobs:
fi
id: dockerhub
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@ -125,4 +131,4 @@ jobs:
$tag
done
done
shell: bash
shell: bash

View file

@ -1,4 +1,4 @@
name: AnythingLLM Development Docker image (amd64/arm64)
name: AnythingLLM Development Docker image (amd64)
concurrency:
group: build-${{ github.ref }}
@ -6,7 +6,7 @@ concurrency:
on:
push:
branches: ['arm-runner-test'] # put your current branch to create a build. Core team only.
branches: ['agent-ui-animations'] # put your current branch to create a build. Core team only.
paths-ignore:
- '**.md'
- 'cloud-deployments/*'
@ -20,7 +20,7 @@ on:
jobs:
push_multi_platform_to_registries:
name: Push Docker multi-platform image to multiple registries
runs-on: ubuntu-24.04-arm
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
@ -69,7 +69,7 @@ jobs:
push: true
sbom: true
provenance: mode=max
platforms: linux/amd64,linux/arm64
platforms: linux/amd64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha

View file

@ -5,7 +5,7 @@
> You are fully responsible for securing your deployment and data in this mode.
> **Any issues** experienced from bare-metal or non-containerized deployments will be **not** answered or supported.
Here you can find the scripts and known working process to run AnythingLLM outside of a Docker container. This method of deployment is preferable for those using local LLMs and want native performance on their devices.
Here you can find the scripts and known working process to run AnythingLLM outside of a Docker container.
### Minimum Requirements
> [!TIP]
@ -47,9 +47,6 @@ AnythingLLM is comprised of three main sections. The `frontend`, `server`, and `
2. Copy `frontend/dist` to `server/public` - `cp -R frontend/dist server/public`.
This should create a folder in `server` named `public` which contains a top level `index.html` file and various other files/folders.
_(optional)_ Build native LLM support if using `native` as your LLM.
`cd server && npx --no node-llama-cpp download`
3. Migrate and prepare your database file.
```
cd server && npx prisma generate --schema=./prisma/schema.prisma
@ -57,10 +54,10 @@ cd server && npx prisma migrate deploy --schema=./prisma/schema.prisma
```
4. Boot the server in production
`cd server && NODE_ENV=production node index.js &`
`cd server && NODE_ENV=production node index.js &`
5. Boot the collection in another process
`cd collector && NODE_ENV=production node index.js &`
`cd collector && NODE_ENV=production node index.js &`
AnythingLLM should now be running on `http://localhost:3001`!

View file

@ -91,8 +91,8 @@ GID='1000'
# LITE_LLM_API_KEY='sk-123abc'
# LLM_PROVIDER='novita'
# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings#key-management
# NOVITA_LLM_MODEL_PREF='gryphe/mythomax-l2-13b'
# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings/key-management
# NOVITA_LLM_MODEL_PREF='deepseek/deepseek-r1'
# LLM_PROVIDER='cohere'
# COHERE_API_KEY=

View file

@ -128,7 +128,7 @@ RUN yarn build && \
rm -rf /tmp/frontend-build
WORKDIR /app
# Install server layer & build node-llama-cpp
# Install server layer
# Also pull and build collector deps (chromium issues prevent bad bindings)
FROM build AS backend-build
COPY ./server /app/server/
@ -139,14 +139,9 @@ WORKDIR /app
# Install collector dependencies
COPY ./collector/ ./collector/
WORKDIR /app/collector
ENV PUPPETEER_DOWNLOAD_BASE_URL=https://storage.googleapis.com/chrome-for-testing-public
ENV PUPPETEER_DOWNLOAD_BASE_URL=https://storage.googleapis.com/chrome-for-testing-public
RUN yarn install --production --network-timeout 100000 && yarn cache clean
# Compile Llama.cpp bindings for node-llama-cpp for this operating system.
# Creates appropriate bindings for the OS
USER root
WORKDIR /app/server
RUN npx --no node-llama-cpp download
WORKDIR /app
USER anythingllm

View file

@ -24,7 +24,7 @@
"js-levenshtein": "^1.1.6",
"lodash.debounce": "^4.0.8",
"markdown-it": "^13.0.1",
"markdown-it-katex": "^2.0.3",
"katex": "^0.6.0",
"moment": "^2.30.1",
"onnxruntime-web": "^1.18.0",
"pluralize": "^8.0.0",

View file

@ -1,102 +0,0 @@
import { useEffect, useState } from "react";
import { Flask } from "@phosphor-icons/react";
import System from "@/models/system";
export default function NativeLLMOptions({ settings }) {
return (
<div className="w-full flex flex-col gap-y-4">
<div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-4 bg-orange-800/30 w-fit rounded-lg px-4 py-2">
<div className="gap-x-2 flex items-center">
<Flask size={18} />
<p className="text-sm md:text-base">
Using a locally hosted LLM is experimental. Use with caution.
</p>
</div>
</div>
<div className="w-full flex items-center gap-[36px]">
<NativeModelSelection settings={settings} />
</div>
</div>
);
}
function NativeModelSelection({ settings }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);
useEffect(() => {
async function findCustomModels() {
setLoading(true);
const { models } = await System.customModels("native-llm", null, null);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, []);
if (loading || customModels.length == 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Selection
</label>
<select
name="NativeLLMModelPref"
disabled={true}
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- waiting for models --
</option>
</select>
</div>
);
}
return (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Selection
</label>
<select
name="NativeLLMModelPref"
required={true}
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
>
{customModels.length > 0 && (
<optgroup label="Your loaded models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={settings.NativeLLMModelPref === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
</select>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Token context window
</label>
<input
type="number"
name="NativeLLMTokenLimit"
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="4096"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.NativeLLMTokenLimit}
required={true}
autoComplete="off"
/>
</div>
</>
);
}

View file

@ -5,7 +5,7 @@ import { NVIDIA_NIM_COMMON_URLS } from "@/utils/constants";
import { useState, useEffect } from "react";
/**
* This component is used to select a remote Nvidia NIM model endpoint
* This component is used to select a remote NVIDIA NIM model endpoint
* This is the default component and way to connect to NVIDIA NIM
* as the "managed" provider can only work in the Desktop context.
*/
@ -26,7 +26,7 @@ export default function RemoteNvidiaNimOptions({ settings }) {
<div className="flex flex-col w-60">
<div className="flex justify-between items-center mb-2">
<label className="text-white text-sm font-semibold">
Nvidia Nim Base URL
NVIDIA Nim Base URL
</label>
{loading ? (
<PreLoader size="6" />
@ -56,7 +56,7 @@ export default function RemoteNvidiaNimOptions({ settings }) {
onBlur={basePath.onBlur}
/>
<p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
Enter the URL where Nvidia NIM is running.
Enter the URL where NVIDIA NIM is running.
</p>
</div>
{!settings?.credentialsOnly && (

View file

@ -2,6 +2,9 @@ import System from "@/models/system";
import { useState, useEffect } from "react";
export default function TogetherAiOptions({ settings }) {
const [inputValue, setInputValue] = useState(settings?.TogetherAiApiKey);
const [apiKey, setApiKey] = useState(settings?.TogetherAiApiKey);
return (
<div className="flex gap-[36px] mt-1.5">
<div className="flex flex-col w-60">
@ -17,37 +20,49 @@ export default function TogetherAiOptions({ settings }) {
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setInputValue(e.target.value)}
onBlur={() => setApiKey(inputValue)}
/>
</div>
{!settings?.credentialsOnly && (
<TogetherAiModelSelection settings={settings} />
<TogetherAiModelSelection settings={settings} apiKey={apiKey} />
)}
</div>
);
}
function TogetherAiModelSelection({ settings }) {
function TogetherAiModelSelection({ settings, apiKey }) {
const [groupedModels, setGroupedModels] = useState({});
const [loading, setLoading] = useState(true);
useEffect(() => {
async function findCustomModels() {
setLoading(true);
const { models } = await System.customModels("togetherai");
if (models?.length > 0) {
const modelsByOrganization = models.reduce((acc, model) => {
acc[model.organization] = acc[model.organization] || [];
acc[model.organization].push(model);
return acc;
}, {});
setGroupedModels(modelsByOrganization);
try {
const key = apiKey === "*".repeat(20) ? null : apiKey;
const { models } = await System.customModels("togetherai", key);
if (models?.length > 0) {
const modelsByOrganization = models.reduce((acc, model) => {
if (model.type !== "chat") return acc; // Only show chat models in dropdown
const org = model.organization || "Unknown";
acc[org] = acc[org] || [];
acc[org].push({
id: model.id,
name: model.name || model.id,
organization: org,
maxLength: model.maxLength,
});
return acc;
}, {});
setGroupedModels(modelsByOrganization);
}
} catch (error) {
console.error("Error fetching Together AI models:", error);
}
setLoading(false);
}
findCustomModels();
}, []);
}, [apiKey]);
if (loading || Object.keys(groupedModels).length === 0) {
return (

View file

@ -1,10 +1,12 @@
import { useState } from "react";
import { useTranslation } from "react-i18next";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { Warning } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
export default function ConfluenceOptions() {
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const handleSubmit = async (e) => {
@ -59,12 +61,11 @@ export default function ConfluenceOptions() {
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-theme-text-primary">
Confluence deployment type
{t("connectors.confluence.deployment_type")}
</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Determine if your Confluence instance is hosted on Atlassian
cloud or self-hosted.
{t("connectors.confluence.deployment_type_explained")}
</p>
</div>
<select
@ -83,10 +84,10 @@ export default function ConfluenceOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-white">Confluence base URL</p>
<p className="font-bold text-white">{t("connectors.confluence.base_url")}</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
This is the base URL of your Confluence space.
{t("connectors.confluence.base_url_explained")}
</p>
</div>
<input
@ -102,11 +103,10 @@ export default function ConfluenceOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
Confluence space key
{t("connectors.confluence.space_key")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
This is the spaces key of your confluence instance that will
be used. Usually begins with ~
{t("connectors.confluence.space_key_explained")}
</p>
</div>
<input
@ -122,10 +122,10 @@ export default function ConfluenceOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
Confluence Username
{t("connectors.confluence.username")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Your Confluence username.
{t("connectors.confluence.username_explained")}
</p>
</div>
<input
@ -142,7 +142,7 @@ export default function ConfluenceOptions() {
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-white">
Confluence Access Token
{t("connectors.confluence.token")}
</p>
<Warning
size={14}
@ -157,8 +157,17 @@ export default function ConfluenceOptions() {
clickable={true}
>
<p className="text-sm">
You need to provide an access token for authentication.
You can generate an access token{" "}
{t("connectors.confluence.token_explained_start")}
<a
href="https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/"
target="_blank"
rel="noopener noreferrer"
className="underline"
onClick={(e) => e.stopPropagation()}
>
{t("connectors.confluence.token_explained_link1")}
</a>
{t("connectors.confluence.token_explained_middle")}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
@ -166,14 +175,14 @@ export default function ConfluenceOptions() {
className="underline"
onClick={(e) => e.stopPropagation()}
>
here
{t("connectors.confluence.token_explained_link2")}
</a>
.
{t("connectors.confluence.token_explained_end")}
</p>
</Tooltip>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Access token for authentication.
{t("connectors.confluence.token_desc")}
</p>
</div>
<input
@ -199,8 +208,7 @@ export default function ConfluenceOptions() {
</button>
{loading && (
<p className="text-xs text-theme-text-secondary">
Once complete, all pages will be available for embedding into
workspaces.
{t("connectors.confluence.task_explained")}
</p>
)}
</div>
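
The hardcoded strings in this connector are replaced with i18next lookup keys. The English values presumably move into the frontend locale resources (the `locales` directory listed among the changed files); a minimal sketch of what those entries could look like, with key names taken from the `t()` calls above, and the file layout and link-fragment wording assumed:

```
// Hypothetical excerpt of an English locale resource.
// Key names come from the t() calls in the diff; the exact file layout and
// the wording of the link fragments are assumptions.
const en = {
  connectors: {
    confluence: {
      deployment_type: "Confluence deployment type",
      deployment_type_explained:
        "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
      base_url: "Confluence base URL",
      base_url_explained: "This is the base URL of your Confluence space.",
      space_key: "Confluence space key",
      username: "Confluence Username",
      token: "Confluence Access Token",
      token_desc: "Access token for authentication.",
      // The tooltip text is split into fragments so the component can render
      // the two <a> links between them.
      token_explained_start: "You need to provide ",
      token_explained_link1: "an access token",
      token_explained_middle: " for authentication. You can generate one ",
      token_explained_link2: "here",
      token_explained_end: ".",
      task_explained:
        "Once complete, all pages will be available for embedding into workspaces.",
    },
  },
};
```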

View file

@ -1,5 +1,6 @@
import React, { useEffect, useState } from "react";
import System from "@/models/system";
import { useTranslation } from "react-i18next";
import showToast from "@/utils/toast";
import pluralize from "pluralize";
import { TagsInput } from "react-tag-input-component";
@ -8,6 +9,7 @@ import { Tooltip } from "react-tooltip";
const DEFAULT_BRANCHES = ["main", "master"];
export default function GithubOptions() {
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const [repo, setRepo] = useState(null);
const [accessToken, setAccessToken] = useState(null);
@ -68,10 +70,10 @@ export default function GithubOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
GitHub Repo URL
{t('connectors.github.URL')}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Url of the GitHub repo you wish to collect.
{t('connectors.github.URL_explained')}
</p>
</div>
<input
@ -89,16 +91,19 @@ export default function GithubOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white font-bold text-sm flex gap-x-2 items-center">
<p className="font-bold text-white">Github Access Token</p>{" "}
<p className="font-bold text-white">
{t('connectors.github.token')}
</p>{" "}
<p className="text-xs font-light flex items-center">
<span className="text-theme-text-secondary">
optional
{t('connectors.github.optional')}
</span>
<PATTooltip accessToken={accessToken} />
</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Access Token to prevent rate limiting.
{t('connectors.github.token_explained')}
</p>
</div>
<input
@ -122,11 +127,12 @@ export default function GithubOptions() {
<div className="flex flex-col w-full py-4 pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm flex gap-x-2 items-center">
<p className="text-white text-sm font-bold">File Ignores</p>
<p className="text-white text-sm font-bold">
{t('connectors.github.ignores')}</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
List in .gitignore format to ignore specific files during
collection. Press enter after each entry you want to save.
{t('connectors.github.git_ignore')}
</p>
</div>
<TagsInput
@ -154,8 +160,8 @@ export default function GithubOptions() {
</button>
{loading && (
<p className="text-xs text-white/50">
Once complete, all files will be available for embedding into
workspaces in the document picker.
{t('connectors.github.task_explained')}
</p>
)}
</div>
@ -166,6 +172,7 @@ export default function GithubOptions() {
}
function GitHubBranchSelection({ repo, accessToken }) {
const { t } = useTranslation();
const [allBranches, setAllBranches] = useState(DEFAULT_BRANCHES);
const [loading, setLoading] = useState(true);
@ -194,7 +201,9 @@ function GitHubBranchSelection({ repo, accessToken }) {
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">Branch</label>
<p className="text-xs font-normal text-theme-text-secondary">
Branch you wish to collect files from.
{t('connectors.github.branch')}
</p>
</div>
<select
@ -203,7 +212,7 @@ function GitHubBranchSelection({ repo, accessToken }) {
className="border-none bg-theme-settings-input-bg border-gray-500 text-white focus:outline-primary-button active:outline-primary-button outline-none text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available branches --
{t('connectors.github.branch_loading')}
</option>
</select>
</div>
@ -215,7 +224,7 @@ function GitHubBranchSelection({ repo, accessToken }) {
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">Branch</label>
<p className="text-xs font-normal text-theme-text-secondary">
Branch you wish to collect files from.
{t('connectors.github.branch_explained')}
</p>
</div>
<select
@ -236,15 +245,14 @@ function GitHubBranchSelection({ repo, accessToken }) {
}
function PATAlert({ accessToken }) {
const { t } = useTranslation();
if (!!accessToken) return null;
return (
<div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-4 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
<div className="gap-x-2 flex items-center">
<Info className="shrink-0" size={25} />
<p className="text-sm">
Without filling out the <b>Github Access Token</b> this data connector
will only be able to collect the <b>top-level</b> files of the repo
due to GitHub's public API rate-limits.
<span dangerouslySetInnerHTML={{ __html: t("connectors.github.token_information") }} />
<br />
<br />
<a
@ -255,7 +263,8 @@ function PATAlert({ accessToken }) {
onClick={(e) => e.stopPropagation()}
>
{" "}
Get a free Personal Access Token with a GitHub account here.
{t('connectors.github.token_personal')}
</a>
</p>
</div>
@ -264,6 +273,7 @@ function PATAlert({ accessToken }) {
}
function PATTooltip({ accessToken }) {
const { t } = useTranslation();
if (!!accessToken) return null;
return (
<>
@ -282,7 +292,7 @@ function PATTooltip({ accessToken }) {
clickable={true}
>
<p className="text-sm">
Without a{" "}
{t('connectors.github.token_explained_start')}
<a
href="https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens"
rel="noreferrer"
@ -290,10 +300,9 @@ function PATTooltip({ accessToken }) {
className="underline"
onClick={(e) => e.stopPropagation()}
>
Personal Access Token
{t('connectors.github.token_explained_link1')}
</a>
, the GitHub API may limit the number of files that can be collected
due to rate limits. You can{" "}
{t('connectors.github.token_explained_middle')}
<a
href="https://github.com/settings/personal-access-tokens/new"
rel="noreferrer"
@ -301,9 +310,9 @@ function PATTooltip({ accessToken }) {
className="underline"
onClick={(e) => e.stopPropagation()}
>
create a temporary Access Token
</a>{" "}
to avoid this issue.
{t('connectors.github.token_explained_link2')}
</a>
{t('connectors.github.token_explained_end')}
</p>
</Tooltip>
</>
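
The PAT alert above is now injected with `dangerouslySetInnerHTML`, so the locale value for `connectors.github.token_information` can keep the inline `<b>` markup of the string it replaces. A hedged sketch of that single entry, with the wording taken from the removed text:

```
// Hypothetical entry; the <b> tags survive because the component renders this
// string with dangerouslySetInnerHTML rather than as escaped text.
const githubTokenInformation = {
  token_information:
    "Without filling out the <b>Github Access Token</b> this data connector " +
    "will only be able to collect the <b>top-level</b> files of the repo " +
    "due to GitHub's public API rate-limits.",
};
```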

View file

@ -5,9 +5,11 @@ import pluralize from "pluralize";
import { TagsInput } from "react-tag-input-component";
import { Info, Warning } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import { useTranslation } from "react-i18next";
const DEFAULT_BRANCHES = ["main", "master"];
export default function GitlabOptions() {
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const [repo, setRepo] = useState(null);
const [accessToken, setAccessToken] = useState(null);
@ -68,10 +70,10 @@ export default function GitlabOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
GitLab Repo URL
{t("connectors.gitlab.URL")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
URL of the GitLab repo you wish to collect.
{t("connectors.gitlab.URL_explained")}
</p>
</div>
<input
@ -89,16 +91,16 @@ export default function GitlabOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white font-bold text-sm flex gap-x-2 items-center">
<p className="font-bold text-white">GitLab Access Token</p>{" "}
<p className="font-bold text-white">{t("connectors.gitlab.token")}</p>{" "}
<p className="text-xs font-light flex items-center">
<span className="text-theme-text-secondary">
optional
{t("connectors.gitlab.optional")}
</span>
<PATTooltip accessToken={accessToken} />
</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Access Token to prevent rate limiting.
{t("connectors.gitlab.token_description")}
</p>
</div>
<input
@ -119,7 +121,7 @@ export default function GitlabOptions() {
<p className="font-bold text-white">Settings</p>{" "}
</label>
<p className="text-xs font-normal text-white/50">
Select additional entities to fetch from the GitLab API.
{t("connectors.gitlab.token_description")}
</p>
</div>
<div className="flex items-center gap-x-2">
@ -132,7 +134,7 @@ export default function GitlabOptions() {
/>
<div className="peer-disabled:opacity-50 pointer-events-none peer h-6 w-11 rounded-full bg-[#CFCFD0] after:absolute after:left-[2px] after:top-[2px] after:h-5 after:w-5 after:rounded-full after:shadow-xl after:border-none after:bg-white after:box-shadow-md after:transition-all after:content-[''] peer-checked:bg-[#32D583] peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-transparent"></div>
<span className="ml-3 text-sm font-medium text-white">
Fetch Issues as Documents
{t("connectors.gitlab.fetch_issues")}
</span>
</label>
</div>
@ -146,11 +148,10 @@ export default function GitlabOptions() {
<div className="flex flex-col w-full py-4 pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm flex gap-x-2 items-center">
<p className="text-white text-sm font-bold">File Ignores</p>
<p className="text-white text-sm font-bold">{t("connectors.gitlab.ignores")}</p>
</label>
<p className="text-xs font-normal text-theme-text-secondary">
List in .gitignore format to ignore specific files during
collection. Press enter after each entry you want to save.
{t("connectors.gitlab.git_ignore")}
</p>
</div>
<TagsInput
@ -178,8 +179,7 @@ export default function GitlabOptions() {
</button>
{loading && (
<p className="text-xs text-white/50">
Once complete, all files will be available for embedding into
workspaces in the document picker.
{t("connectors.gitlab.task_explained")}
</p>
)}
</div>
@ -190,6 +190,7 @@ export default function GitlabOptions() {
}
function GitLabBranchSelection({ repo, accessToken }) {
const { t } = useTranslation();
const [allBranches, setAllBranches] = useState(DEFAULT_BRANCHES);
const [loading, setLoading] = useState(true);
@ -216,9 +217,9 @@ function GitLabBranchSelection({ repo, accessToken }) {
return (
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">Branch</label>
<label className="text-white text-sm font-bold">{t("connectors.gitlab.branch")}</label>
<p className="text-xs font-normal text-theme-text-secondary">
Branch you wish to collect files from.
{t("connectors.gitlab.branch_explained")}
</p>
</div>
<select
@ -227,7 +228,7 @@ function GitLabBranchSelection({ repo, accessToken }) {
className="border-none bg-theme-settings-input-bg border-gray-500 text-white focus:outline-primary-button active:outline-primary-button outline-none text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available branches --
{t("connectors.gitlab.branch_loading")}
</option>
</select>
</div>
@ -239,7 +240,7 @@ function GitLabBranchSelection({ repo, accessToken }) {
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">Branch</label>
<p className="text-xs font-normal text-theme-text-secondary">
Branch you wish to collect files from.
{t("connectors.gitlab.branch_explained")}
</p>
</div>
<select
@ -260,15 +261,14 @@ function GitLabBranchSelection({ repo, accessToken }) {
}
function PATAlert({ accessToken }) {
const { t } = useTranslation();
if (!!accessToken) return null;
return (
<div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-4 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
<div className="gap-x-2 flex items-center">
<Info className="shrink-0" size={25} />
<p className="text-sm">
Without filling out the <b>GitLab Access Token</b> this data connector
will only be able to collect the <b>top-level</b> files of the repo
due to GitLab's public API rate-limits.
<span dangerouslySetInnerHTML={{ __html: t("connectors.gitlab.token_information") }} />
<br />
<br />
<a
@ -278,8 +278,7 @@ function PATAlert({ accessToken }) {
className="underline"
onClick={(e) => e.stopPropagation()}
>
{" "}
Get a free Personal Access Token with a GitLab account here.
{t("connectors.gitlab.token_personal")}
</a>
</p>
</div>
@ -288,6 +287,7 @@ function PATAlert({ accessToken }) {
}
function PATTooltip({ accessToken }) {
const { t } = useTranslation();
if (!!accessToken) return null;
return (
<>
@ -306,7 +306,7 @@ function PATTooltip({ accessToken }) {
clickable={true}
>
<p className="text-sm">
Without a{" "}
{t('connectors.gitlab.token_explained_start')}
<a
href="https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html"
rel="noreferrer"
@ -314,20 +314,19 @@ function PATTooltip({ accessToken }) {
className="underline"
onClick={(e) => e.stopPropagation()}
>
Personal Access Token
{t('connectors.gitlab.token_explained_link1')}
</a>
, the GitLab API may limit the number of files that can be collected
due to rate limits. You can{" "}
{t('connectors.gitlab.token_explained_middle')}
<a
href="https://gitlab.com/-/user_settings/personal_access_tokens"
href="https://gitlab.com/-/profile/personal_access_tokens"
rel="noreferrer"
target="_blank"
className="underline"
onClick={(e) => e.stopPropagation()}
>
create a temporary Access Token
</a>{" "}
to avoid this issue.
{t('connectors.gitlab.token_explained_link2')}
</a>
{t('connectors.gitlab.token_explained_end')}
</p>
</Tooltip>
</>

View file

@ -2,8 +2,10 @@ import React, { useState } from "react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import pluralize from "pluralize";
import { useTranslation } from "react-i18next";
export default function WebsiteDepthOptions() {
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const handleSubmit = async (e) => {
@ -55,10 +57,10 @@ export default function WebsiteDepthOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
Website URL
{t("connectors.website-depth.URL")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
URL of the website you want to scrape.
{t("connectors.website-depth.URL_explained")}
</p>
</div>
<input
@ -73,10 +75,9 @@ export default function WebsiteDepthOptions() {
</div>
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">Depth</label>
<label className="text-white text-sm font-bold"> {t("connectors.website-depth.depth")}</label>
<p className="text-xs font-normal text-theme-text-secondary">
This is the number of child-links that the worker should
follow from the origin URL.
{t("connectors.website-depth.depth_explained")}
</p>
</div>
<input
@ -92,10 +93,10 @@ export default function WebsiteDepthOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
Max Links
{t("connectors.website-depth.max_pages")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
Maximum number of links to scrape.
{t("connectors.website-depth.max_pages_explained")}
</p>
</div>
<input
@ -122,8 +123,7 @@ export default function WebsiteDepthOptions() {
</button>
{loading && (
<p className="text-xs text-theme-text-secondary">
Once complete, all scraped pages will be available for embedding
into workspaces in the document picker.
{t("connectors.website-depth.task_explained")}
</p>
)}
</div>

View file

@ -1,8 +1,10 @@
import React, { useState } from "react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useTranslation } from "react-i18next";
export default function YoutubeOptions() {
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const handleSubmit = async (e) => {
@ -50,10 +52,20 @@ export default function YoutubeOptions() {
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
YouTube Video URL
{t("connectors.youtube.URL")}
</label>
<p className="text-xs font-normal text-theme-text-secondary">
URL of the YouTube video you wish to transcribe.
{t("connectors.youtube.URL_explained_start")}
<a
href="https://support.google.com/youtube/answer/6373554"
rel="noreferrer"
target="_blank"
className="underline"
onClick={(e) => e.stopPropagation()}
>
{t("connectors.youtube.URL_explained_link")}
</a>
{t("connectors.youtube.URL_explained_end")}
</p>
</div>
<input
@ -79,8 +91,7 @@ export default function YoutubeOptions() {
</button>
{loading && (
<p className="text-xs text-theme-text-secondary max-w-sm">
Once complete, the transcription will be available for embedding
into workspaces in the document picker.
{t("connectors.youtube.task_explained")}
</p>
)}
</div>

View file

@ -1,5 +1,6 @@
import ConnectorImages from "@/components/DataConnectorOption/media";
import { MagnifyingGlass } from "@phosphor-icons/react";
import { useTranslation } from "react-i18next";
import GithubOptions from "./Connectors/Github";
import GitlabOptions from "./Connectors/Gitlab";
import YoutubeOptions from "./Connectors/Youtube";
@ -8,45 +9,44 @@ import { useState } from "react";
import ConnectorOption from "./ConnectorOption";
import WebsiteDepthOptions from "./Connectors/WebsiteDepth";
export const DATA_CONNECTORS = {
export const getDataConnectors = (t) => ({
github: {
name: "GitHub Repo",
name: t('connectors.github.name'),
image: ConnectorImages.github,
description:
"Import an entire public or private Github repository in a single click.",
description: t('connectors.github.description'),
options: <GithubOptions />,
},
gitlab: {
name: "GitLab Repo",
name: t('connectors.gitlab.name'),
image: ConnectorImages.gitlab,
description:
"Import an entire public or private GitLab repository in a single click.",
description: t('connectors.gitlab.description'),
options: <GitlabOptions />,
},
"youtube-transcript": {
name: "YouTube Transcript",
name: t('connectors.youtube.name'),
image: ConnectorImages.youtube,
description:
"Import the transcription of an entire YouTube video from a link.",
description: t('connectors.youtube.description'),
options: <YoutubeOptions />,
},
"website-depth": {
name: "Bulk Link Scraper",
name: t('connectors.website-depth.name'),
image: ConnectorImages.websiteDepth,
description: "Scrape a website and its sub-links up to a certain depth.",
description: t('connectors.website-depth.description'),
options: <WebsiteDepthOptions />,
},
confluence: {
name: "Confluence",
name: t('connectors.confluence.name'),
image: ConnectorImages.confluence,
description: "Import an entire Confluence page in a single click.",
description: t('connectors.confluence.description'),
options: <ConfluenceOptions />,
},
};
});
export default function DataConnectors() {
const { t } = useTranslation();
const [selectedConnector, setSelectedConnector] = useState("github");
const [searchQuery, setSearchQuery] = useState("");
const DATA_CONNECTORS = getDataConnectors(t);
const filteredConnectors = Object.keys(DATA_CONNECTORS).filter((slug) =>
DATA_CONNECTORS[slug].name.toLowerCase().includes(searchQuery.toLowerCase())
@ -63,7 +63,7 @@ export default function DataConnectors() {
/>
<input
type="text"
placeholder="Search data connectors"
placeholder={t('connectors.search-placeholder')}
className="border-none z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:outline-primary-button active:outline-primary-button outline-none placeholder:text-theme-settings-input-placeholder text-white bg-theme-settings-input-bg"
autoComplete="off"
value={searchQuery}
@ -85,7 +85,7 @@ export default function DataConnectors() {
))
) : (
<div className="text-white text-center mt-4">
No data connectors found.
{t('connectors.no-connectors')}
</div>
)}
</div>

View file

@ -1,6 +1,7 @@
import UploadFile from "../UploadFile";
import PreLoader from "@/components/Preloader";
import { memo, useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import FolderRow from "./FolderRow";
import System from "@/models/system";
import { MagnifyingGlass, Plus, Trash } from "@phosphor-icons/react";
@ -30,6 +31,7 @@ function Directory({
setLoadingMessage,
loadingMessage,
}) {
const { t } = useTranslation();
const [amountSelected, setAmountSelected] = useState(0);
const [showFolderSelection, setShowFolderSelection] = useState(false);
const [searchTerm, setSearchTerm] = useState("");
@ -51,9 +53,7 @@ function Directory({
const deleteFiles = async (event) => {
event.stopPropagation();
if (
!window.confirm(
"Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible."
)
!window.confirm(t('connectors.directory.delete-confirmation'))
) {
return false;
}
@ -83,7 +83,7 @@ function Directory({
setLoading(true);
setLoadingMessage(
`Removing ${toRemove.length} documents and ${foldersToRemove.length} folders. Please wait.`
t('connectors.directory.removing-message', { count: toRemove.length, folderCount: foldersToRemove.length })
);
await System.deleteDocuments(toRemove);
for (const folderName of foldersToRemove) {
@ -166,7 +166,7 @@ function Directory({
// show info if some files were not moved due to being embedded
showToast(message, "info");
} else {
showToast(`Successfully moved ${toMove.length} documents.`, "success");
showToast(t('connectors.directory.move-success', { count: toMove.length }), "success");
}
await fetchKeys(true);
setSelectedItems({});
@ -194,11 +194,11 @@ function Directory({
<div className="px-8 pb-8" onContextMenu={handleContextMenu}>
<div className="flex flex-col gap-y-6">
<div className="flex items-center justify-between w-[560px] px-5 relative">
<h3 className="text-white text-base font-bold">My Documents</h3>
<h3 className="text-white text-base font-bold">{t('connectors.directory.my-documents')}</h3>
<div className="relative">
<input
type="search"
placeholder="Search for document"
placeholder={t('connectors.directory.search-document')}
onChange={handleSearch}
className="border-none search-input bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder focus:outline-primary-button active:outline-primary-button outline-none text-sm rounded-lg pl-9 pr-2.5 py-2 w-[250px] h-[32px] light:border-theme-modal-border light:border"
/>
@ -218,7 +218,7 @@ function Directory({
className="text-theme-text-primary light:text-[#0ba5ec]"
/>
<div className="text-theme-text-primary light:text-[#0ba5ec] text-xs font-bold leading-[18px]">
New Folder
{t('connectors.directory.new-folder')}
</div>
</button>
</div>
@ -257,7 +257,7 @@ function Directory({
) : (
<div className="w-full h-full flex items-center justify-center">
<p className="text-white text-opacity-40 text-sm font-medium">
No Documents
{t('connectors.directory.no-documents')}
</p>
</div>
)}
@ -272,7 +272,7 @@ function Directory({
onMouseLeave={() => setHighlightWorkspace(false)}
className="border-none text-sm font-semibold bg-white light:bg-[#E0F2FE] h-[30px] px-2.5 rounded-lg hover:bg-neutral-800/80 hover:text-white light:text-[#026AA2] light:hover:bg-[#026AA2] light:hover:text-white"
>
Move to Workspace
{t('connectors.directory.move-workspace')}
</button>
<div className="relative">
<button
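
The two dynamic messages in this file pass values into `t()` (`count` and `folderCount`), so the matching locale entries would use i18next-style `{{...}}` interpolation placeholders. A small sketch, with key and placeholder names from the diff and wording taken from the removed template literals:

```
// Hypothetical interpolated entries under connectors.directory.
const directoryMessages = {
  "removing-message":
    "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
  "move-success": "Successfully moved {{count}} documents.",
};
```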

View file

@ -1,5 +1,6 @@
import { CloudArrowUp } from "@phosphor-icons/react";
import { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import showToast from "../../../../../utils/toast";
import System from "../../../../../models/system";
import { useDropzone } from "react-dropzone";
@ -14,6 +15,7 @@ export default function UploadFile({
setLoading,
setLoadingMessage,
}) {
const { t } = useTranslation();
const [ready, setReady] = useState(false);
const [files, setFiles] = useState([]);
const [fetchingUrl, setFetchingUrl] = useState(false);
@ -90,21 +92,20 @@ export default function UploadFile({
<div className="flex flex-col items-center justify-center h-full">
<CloudArrowUp className="w-8 h-8 text-white/80 light:invert" />
<div className="text-white text-opacity-80 text-sm font-semibold py-1">
Document Processor Unavailable
{t('connectors.upload.processor-offline')}
</div>
<div className="text-white text-opacity-60 text-xs font-medium py-1 px-20 text-center">
We can't upload your files right now because the document
processor is offline. Please try again later.
{t('connectors.upload.processor-offline-desc')}
</div>
</div>
) : files.length === 0 ? (
<div className="flex flex-col items-center justify-center">
<CloudArrowUp className="w-8 h-8 text-white/80 light:invert" />
<div className="text-white text-opacity-80 text-sm font-semibold py-1">
Click to upload or drag and drop
{t('connectors.upload.click-upload')}
</div>
<div className="text-white text-opacity-60 text-xs font-medium py-1">
supports text files, csv's, spreadsheets, audio files, and more!
{t('connectors.upload.file-types')}
</div>
</div>
) : (
@ -128,7 +129,7 @@ export default function UploadFile({
)}
</div>
<div className="text-center text-white text-opacity-50 text-xs font-medium w-[560px] py-2">
or submit a link
{t('connectors.upload.or-submit-link')}
</div>
<form onSubmit={handleSendLink} className="flex gap-x-2">
<input
@ -136,7 +137,7 @@ export default function UploadFile({
name="link"
type="url"
className="border-none disabled:bg-theme-settings-input-bg disabled:text-theme-settings-input-placeholder bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-3/4 p-2.5"
placeholder={"https://example.com"}
placeholder={t('connectors.upload.placeholder-link')}
autoComplete="off"
/>
<button
@ -144,13 +145,11 @@ export default function UploadFile({
type="submit"
className="disabled:bg-white/20 disabled:text-slate-300 disabled:border-slate-400 disabled:cursor-wait bg bg-transparent hover:bg-slate-200 hover:text-slate-800 w-auto border border-white light:border-theme-modal-border text-sm text-white p-2.5 rounded-lg"
>
{fetchingUrl ? "Fetching..." : "Fetch website"}
{fetchingUrl ? t('connectors.upload.fetching') : t('connectors.upload.fetch-website')}
</button>
</form>
<div className="mt-6 text-center text-white text-opacity-80 text-xs font-medium w-[560px]">
These files will be uploaded to the document processor running on this
AnythingLLM instance. These files are not sent or shared with a third
party.
{t('connectors.upload.privacy-notice')}
</div>
</div>
);

View file

@ -10,6 +10,7 @@ import { Link } from "react-router-dom";
import Workspace from "@/models/workspace";
import { Tooltip } from "react-tooltip";
import { safeJsonParse } from "@/utils/request";
import { useTranslation } from "react-i18next";
function WorkspaceDirectory({
workspace,
@ -25,6 +26,7 @@ function WorkspaceDirectory({
embeddingCosts,
movedItems,
}) {
const { t } = useTranslation();
const [selectedItems, setSelectedItems] = useState({});
const toggleSelection = (item) => {
@ -182,7 +184,7 @@ function WorkspaceDirectory({
) : (
<div className="w-full h-full flex items-center justify-center">
<p className="text-white text-opacity-40 text-sm font-medium">
No Documents
{t("connectors.directory.no_docs")}
</p>
</div>
)}
@ -201,14 +203,14 @@ function WorkspaceDirectory({
(sum, folder) => sum + folder.items.length,
0
)
? "Deselect All"
: "Select All"}
? t("connectors.directory.deselect_all")
: t("connectors.directory.select_all")}
</button>
<button
onClick={removeSelectedItems}
className="border-none text-sm font-semibold bg-white light:bg-[#E0F2FE] h-[30px] px-2.5 rounded-lg hover:bg-neutral-800/80 hover:text-white light:text-[#026AA2] light:hover:bg-[#026AA2] light:hover:text-white"
>
Remove Selected
{t("connectors.directory.remove_selected")}
</button>
</div>
</div>
@ -229,7 +231,7 @@ function WorkspaceDirectory({
}`}
</p>
<p className="mt-2 text-xs italic" hidden={embeddingCosts === 0}>
*One time cost for embeddings
{t("new-workspace.costs")}
</p>
</div>
@ -237,7 +239,7 @@ function WorkspaceDirectory({
onClick={(e) => handleSaveChanges(e)}
className="border border-slate-200 px-5 py-2.5 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
>
Save and Embed
{t("connectors.directory.save_embed")}
</button>
</div>
)}
@ -250,6 +252,7 @@ function WorkspaceDirectory({
}
const PinAlert = memo(() => {
const { t } = useTranslation();
const [showAlert, setShowAlert] = useState(false);
function dismissAlert() {
setShowAlert(false);
@ -277,25 +280,20 @@ const PinAlert = memo(() => {
weight="regular"
/>
<h3 className="text-xl font-semibold text-white">
What is document pinning?
{t("connectors.pinning.what_pinning")}
</h3>
</div>
</div>
<div className="py-7 px-9 space-y-2 flex-col">
<div className="w-full text-white text-md flex flex-col gap-y-2">
<p>
When you <b>pin</b> a document in AnythingLLM we will inject the
entire content of the document into your prompt window for your
LLM to fully comprehend.
<span dangerouslySetInnerHTML={{ __html: t("connectors.pinning.pin_explained_block1") }} />
</p>
<p>
This works best with <b>large-context models</b> or small files
that are critical to its knowledge-base.
<span dangerouslySetInnerHTML={{ __html: t("connectors.pinning.pin_explained_block2") }} />
</p>
<p>
If you are not getting the answers you desire from AnythingLLM by
default then pinning is a great way to get higher quality answers
in a click.
{t("connectors.pinning.pin_explained_block3")}
</p>
</div>
</div>
@ -304,7 +302,7 @@ const PinAlert = memo(() => {
onClick={dismissAlert}
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Okay, got it
{t("connectors.pinning.accept")}
</button>
</div>
</div>
@ -313,6 +311,7 @@ const PinAlert = memo(() => {
});
const DocumentWatchAlert = memo(() => {
const { t } = useTranslation();
const [showAlert, setShowAlert] = useState(false);
function dismissAlert() {
setShowAlert(false);
@ -340,31 +339,27 @@ const DocumentWatchAlert = memo(() => {
weight="regular"
/>
<h3 className="text-xl font-semibold text-white">
What does watching a document do?
{t("connectors.pinning.what_watching")}
</h3>
</div>
</div>
<div className="py-7 px-9 space-y-2 flex-col">
<div className="w-full text-white text-md flex flex-col gap-y-2">
<p>
When you <b>watch</b> a document in AnythingLLM we will{" "}
<i>automatically</i> sync your document content from it's original
source on regular intervals. This will automatically update the
content in every workspace where this file is managed.
<span dangerouslySetInnerHTML={{ __html: t("connectors.watching.watch_explained_block1") }} />
</p>
<p>
This feature currently supports online-based content and will not
be available for manually uploaded documents.
{t("connectors.watching.watch_explained_block2")}
</p>
<p>
You can manage what documents are watched from the{" "}
{t("connectors.watching.watch_explained_block3_start")}
<Link
to={paths.experimental.liveDocumentSync.manage()}
className="text-blue-600 underline"
>
File manager
</Link>{" "}
admin view.
{t("connectors.watching.watch_explained_block3_link")}
</Link>
{t("connectors.watching.watch_explained_block3_end")}
</p>
</div>
</div>
@ -373,7 +368,7 @@ const DocumentWatchAlert = memo(() => {
onClick={dismissAlert}
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Okay, got it
{t("connectors.watching.accept")}
</button>
</div>
</div>

View file

@ -1,5 +1,6 @@
import React, { useState, useEffect, memo } from "react";
import { X } from "@phosphor-icons/react";
import { useTranslation } from "react-i18next";
import { useParams } from "react-router-dom";
import Workspace from "../../../models/workspace";
import System from "../../../models/system";
@ -11,6 +12,7 @@ import ModalWrapper from "@/components/ModalWrapper";
const noop = () => {};
const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
const { t } = useTranslation();
const { slug } = useParams();
const { user } = useUser();
const [workspace, setWorkspace] = useState(null);
@ -42,7 +44,7 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
<h3 className="text-xl font-semibold text-white overflow-hidden overflow-ellipsis whitespace-nowrap">
Editing "{workspace.name}"
{t('connectors.manage.editing')} "{workspace.name}"
</h3>
</div>
<button
@ -59,8 +61,7 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
>
<div className="py-7 px-9 space-y-2 flex-col">
<p className="text-white">
Editing these settings are only available on a desktop device.
Please access this page on your desktop to continue.
{t('connectors.manage.desktop-only')}
</p>
</div>
</div>
@ -70,7 +71,7 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
type="button"
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Dismiss
{t('connectors.manage.dismiss')}
</button>
</div>
</div>
@ -114,6 +115,7 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
export default memo(ManageWorkspace);
const ModalTabSwitcher = ({ selectedTab, setSelectedTab }) => {
const { t } = useTranslation();
return (
<div className="w-full flex justify-center z-10 relative">
<div className="gap-x-2 flex justify-center -mt-[68px] mb-10 bg-theme-bg-secondary p-1 rounded-xl shadow border-2 border-theme-modal-border w-fit">
@ -125,7 +127,7 @@ const ModalTabSwitcher = ({ selectedTab, setSelectedTab }) => {
: "text-white/20 font-medium hover:text-white light:bg-white light:text-[#535862] light:hover:bg-[#E0F2FE]"
}`}
>
Documents
{t('connectors.manage.documents')}
</button>
<button
onClick={() => setSelectedTab("dataConnectors")}
@ -135,7 +137,7 @@ const ModalTabSwitcher = ({ selectedTab, setSelectedTab }) => {
: "text-white/20 font-medium hover:text-white light:bg-white light:text-[#535862] light:hover:bg-[#E0F2FE]"
}`}
>
Data Connectors
{t('connectors.manage.data-connectors')}
</button>
</div>
</div>

View file

@ -6,6 +6,7 @@ import showToast from "@/utils/toast";
import { Plus, X } from "@phosphor-icons/react";
import ModalWrapper from "@/components/ModalWrapper";
import { useTheme } from "@/hooks/useTheme";
import { useTranslation } from "react-i18next";
export default function AccountModal({ user, hideModal }) {
const { pfp, setPfp } = usePfp();
@ -28,6 +29,7 @@ export default function AccountModal({ user, hideModal }) {
};
const handleRemovePfp = async () => {
const { success, error } = await System.removePfp();
if (!success) {
showToast(`Failed to remove profile picture: ${error}`, "error");
@ -39,7 +41,7 @@ export default function AccountModal({ user, hideModal }) {
const handleUpdate = async (e) => {
e.preventDefault();
const data = {};
const form = new FormData(e.target);
for (var [key, value] of form.entries()) {
@ -61,14 +63,14 @@ export default function AccountModal({ user, hideModal }) {
showToast(`Failed to update user: ${error}`, "error");
}
};
const { t } = useTranslation();
return (
<ModalWrapper isOpen={true}>
<div className="w-full max-w-2xl bg-theme-bg-secondary rounded-lg shadow border-2 border-theme-modal-border overflow-hidden">
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
<h3 className="text-xl font-semibold text-white overflow-hidden overflow-ellipsis whitespace-nowrap">
Edit Account
{t('profile_settings.edit_account')}
</h3>
</div>
<button
@ -104,7 +106,7 @@ export default function AccountModal({ user, hideModal }) {
<div className="flex flex-col items-center justify-center p-3">
<Plus className="w-8 h-8 text-theme-text-secondary m-2" />
<span className="text-theme-text-secondary text-opacity-80 text-sm font-semibold">
Profile Picture
{t('profile_settings.profile_picture')}
</span>
<span className="text-theme-text-secondary text-opacity-60 text-xs">
800 x 800
@ -118,7 +120,7 @@ export default function AccountModal({ user, hideModal }) {
onClick={handleRemovePfp}
className="mt-3 text-theme-text-secondary text-opacity-60 text-sm font-medium hover:underline"
>
Remove Profile Picture
{t('profile_settings.remove_profile_picture')}
</button>
)}
</div>
@ -129,7 +131,7 @@ export default function AccountModal({ user, hideModal }) {
htmlFor="username"
className="block mb-2 text-sm font-medium text-theme-text-primary"
>
Username
{t('profile_settings.username')}
</label>
<input
name="username"
@ -142,8 +144,7 @@ export default function AccountModal({ user, hideModal }) {
autoComplete="off"
/>
<p className="mt-2 text-xs text-white/60">
Username must be only contain lowercase letters, numbers,
underscores, and hyphens with no spaces
{t('profile_settings.username_description')}
</p>
</div>
<div>
@ -151,7 +152,7 @@ export default function AccountModal({ user, hideModal }) {
htmlFor="password"
className="block mb-2 text-sm font-medium text-white"
>
New Password
{t('profile_settings.new_password')}
</label>
<input
name="password"
@ -161,7 +162,7 @@ export default function AccountModal({ user, hideModal }) {
minLength={8}
/>
<p className="mt-2 text-xs text-white/60">
Password must be at least 8 characters long
{t('profile_settings.passwort_description')}
</p>
</div>
<div className="flex flex-row gap-x-8">
@ -175,13 +176,13 @@ export default function AccountModal({ user, hideModal }) {
type="button"
className="transition-all duration-300 text-white hover:bg-zinc-700 px-4 py-2 rounded-lg text-sm"
>
Cancel
{t('profile_settings.cancel')}
</button>
<button
type="submit"
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Update Account
{t('profile_settings.update_account')}
</button>
</div>
</form>
@ -198,14 +199,14 @@ function LanguagePreference() {
getLanguageName,
changeLanguage,
} = useLanguageOptions();
const { t } = useTranslation();
return (
<div>
<label
htmlFor="userLang"
className="block mb-2 text-sm font-medium text-white"
>
Preferred language
{t('profile_settings.language')}
</label>
<select
name="userLang"
@ -227,14 +228,14 @@ function LanguagePreference() {
function ThemePreference() {
const { theme, setTheme, availableThemes } = useTheme();
const { t } = useTranslation();
return (
<div>
<label
htmlFor="theme"
className="block mb-2 text-sm font-medium text-white"
>
Theme Preference
{t('profile_settings.theme')}
</label>
<select
name="theme"
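The hardcoded UI strings above are replaced with t() lookups against nested keys such as profile_settings.edit_account. As a rough sketch of how those keys resolve (the import path and init options below are assumptions, not this repo's actual i18n bootstrap), i18next walks the active locale's nested TRANSLATIONS object:

// Minimal sketch, assuming each locale exports its TRANSLATIONS object and is
// registered under i18next's default "translation" namespace.
import i18next from "i18next";
import { initReactI18next } from "react-i18next";
import English from "@/locales/en/common"; // hypothetical path to the en TRANSLATIONS export

i18next.use(initReactI18next).init({
  resources: { en: { translation: English } },
  lng: "en",
  fallbackLng: "en",
  interpolation: { escapeValue: false }, // React escapes rendered output already
});

i18next.t("profile_settings.edit_account"); // -> "Edit Account"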

View file

@ -14,6 +14,7 @@ import paths from "@/utils/paths";
import Appearance from "@/models/appearance";
import useTextSize from "@/hooks/useTextSize";
import { v4 } from "uuid";
import { useTranslation } from "react-i18next";
export default function ChatHistory({
history = [],
@ -23,6 +24,7 @@ export default function ChatHistory({
regenerateAssistantMessage,
hasAttachments = false,
}) {
const { t } = useTranslation();
const lastScrollTopRef = useRef(0);
const { user } = useUser();
const { threadSlug = null } = useParams();
@ -138,44 +140,6 @@ export default function ChatHistory({
);
};
if (history.length === 0 && !hasAttachments) {
return (
<div className="flex flex-col h-full md:mt-0 pb-44 md:pb-40 w-full justify-end items-center">
<div className="flex flex-col items-center md:items-start md:max-w-[600px] w-full px-4">
<p className="text-white/60 text-lg font-base py-4">
Welcome to your new workspace.
</p>
{!user || user.role !== "default" ? (
<p className="w-full items-center text-white/60 text-lg font-base flex flex-col md:flex-row gap-x-1">
To get started either{" "}
<span
className="underline font-medium cursor-pointer"
onClick={showModal}
>
upload a document
</span>
or <b className="font-medium italic">send a chat.</b>
</p>
) : (
<p className="w-full items-center text-white/60 text-lg font-base flex flex-col md:flex-row gap-x-1">
To get started <b className="font-medium italic">send a chat.</b>
</p>
)}
<WorkspaceChatSuggestions
suggestions={workspace?.suggestedMessages ?? []}
sendSuggestion={handleSendSuggestedMessage}
/>
</div>
{showing && (
<ManageWorkspace
hideModal={hideModal}
providedSlug={workspace.slug}
/>
)}
</div>
);
}
const compiledHistory = useMemo(
() =>
buildMessages({
@ -212,6 +176,44 @@ export default function ChatHistory({
[compiledHistory.length, lastMessageInfo]
);
if (history.length === 0 && !hasAttachments) {
return (
<div className="flex flex-col h-full md:mt-0 pb-44 md:pb-40 w-full justify-end items-center">
<div className="flex flex-col items-center md:items-start md:max-w-[600px] w-full px-4">
<p className="text-white/60 text-lg font-base py-4">
{t('chat_window.welcome')}
</p>
{!user || user.role !== "default" ? (
<p className="w-full items-center text-white/60 text-lg font-base flex flex-col md:flex-row gap-x-1">
{t('chat_window.get_started')}
<span
className="underline font-medium cursor-pointer"
onClick={showModal}
>
{t('chat_window.upload')}
</span>
{t('chat_window.or')} <b className="font-medium italic">{t('chat_window.send_chat')}</b>
</p>
) : (
<p className="w-full items-center text-white/60 text-lg font-base flex flex-col md:flex-row gap-x-1">
{t('chat_window.get_started_default')} <b className="font-medium italic">{t('chat_window.send_chat')}</b>
</p>
)}
<WorkspaceChatSuggestions
suggestions={workspace?.suggestedMessages ?? []}
sendSuggestion={handleSendSuggestedMessage}
/>
</div>
{showing && (
<ManageWorkspace
hideModal={hideModal}
providedSlug={workspace.slug}
/>
)}
</div>
);
}
return (
<div
className={`markdown text-white/80 light:text-theme-text-primary font-light ${textSizeClass} h-full md:h-[83%] pb-[100px] pt-6 md:pt-0 md:pb-20 md:mx-0 overflow-y-scroll flex flex-col justify-start ${showScrollbar ? "show-scrollbar" : "no-scroll"}`}
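The empty-history early return in this file was moved below the useMemo hooks rather than rewritten: React requires hooks to run unconditionally, in the same order, on every render, so a conditional return has to come after all of them. A minimal sketch of that pattern (component and helper names are illustrative, not the real ones):

import { useMemo } from "react";

function Example({ items }) {
  // Hooks run first, on every render, regardless of props.
  const compiled = useMemo(() => items.map((item) => item.toUpperCase()), [items]);

  // Conditional return only after all hooks have executed.
  if (items.length === 0) return null;

  return (
    <ul>
      {compiled.map((text) => (
        <li key={text}>{text}</li>
      ))}
    </ul>
  );
}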

View file

@ -2,16 +2,18 @@ import { useEffect, useRef, useState } from "react";
import { Tooltip } from "react-tooltip";
import { At } from "@phosphor-icons/react";
import { useIsAgentSessionActive } from "@/utils/chat/agent";
import { useTranslation } from "react-i18next";
export default function AvailableAgentsButton({ showing, setShowAgents }) {
const { t } = useTranslation();
const agentSessionActive = useIsAgentSessionActive();
if (agentSessionActive) return null;
return (
<div
id="agent-list-btn"
data-tooltip-id="tooltip-agent-list-btn"
data-tooltip-content="View all available agents you can use for chatting."
aria-label="View all available agents you can use for chatting."
data-tooltip-content={t('chat_window.agents')}
aria-label={t('chat_window.agents')}
onClick={() => setShowAgents(!showing)}
className={`flex justify-center items-center cursor-pointer ${
showing ? "!opacity-100" : ""

View file

@ -1,12 +1,14 @@
import useUser from "@/hooks/useUser";
import { PaperclipHorizontal } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import { useTranslation } from "react-i18next";
/**
* This is a simple proxy component that clicks on the DnD file uploader for the user.
* @returns
*/
export default function AttachItem() {
const { t } = useTranslation();
const { user } = useUser();
if (!!user && user.role === "default") return null;
@ -15,8 +17,8 @@ export default function AttachItem() {
<button
id="attach-item-btn"
data-tooltip-id="attach-item-btn"
data-tooltip-content="Attach a file to this chat"
aria-label="Attach a file to this chat"
data-tooltip-content={t('chat_window.attach_file')}
aria-label={t('chat_window.attach_file')}
type="button"
onClick={(e) => {
e?.target?.blur();

View file

@ -4,13 +4,15 @@ import { Tooltip } from "react-tooltip";
import ResetCommand from "./reset";
import EndAgentSession from "./endAgentSession";
import SlashPresets from "./SlashPresets";
import { useTranslation } from "react-i18next";
export default function SlashCommandsButton({ showing, setShowSlashCommand }) {
const { t } = useTranslation();
return (
<div
id="slash-cmd-btn"
data-tooltip-id="tooltip-slash-cmd-btn"
data-tooltip-content="View all available slash commands for chatting."
data-tooltip-content={t('chat_window.slash')}
onClick={() => setShowSlashCommand(!showing)}
className={`flex justify-center items-center cursor-pointer ${
showing ? "!opacity-100" : ""

View file

@ -6,6 +6,7 @@ import SpeechRecognition, {
useSpeechRecognition,
} from "react-speech-recognition";
import { PROMPT_INPUT_EVENT } from "../../PromptInput";
import { useTranslation } from "react-i18next";
let timeout;
const SILENCE_INTERVAL = 3_200; // milliseconds of silence to wait before closing the session.
@ -20,7 +21,7 @@ export default function SpeechToText({ sendCommand }) {
} = useSpeechRecognition({
clearTranscriptOnListen: true,
});
const { t } = useTranslation();
function startSTTSession() {
if (!isMicrophoneAvailable) {
alert(
@ -95,8 +96,8 @@ export default function SpeechToText({ sendCommand }) {
<div
id="text-size-btn"
data-tooltip-id="tooltip-text-size-btn"
data-tooltip-content="Speak your prompt"
aria-label="Speak your prompt"
data-tooltip-content={t('chat_window.microphone')}
aria-label={t('chat_window.microphone')}
onClick={listening ? endTTSSession : startSTTSession}
className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 light:opacity-100 light:hover:opacity-60 cursor-pointer ${
!!listening ? "!opacity-100" : ""

View file

@ -1,10 +1,12 @@
import { useState, useRef, useEffect } from "react";
import { TextT } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import { useTranslation } from "react-i18next";
export default function TextSizeButton() {
const [showTextSizeMenu, setShowTextSizeMenu] = useState(false);
const buttonRef = useRef(null);
const { t } = useTranslation();
return (
<>
@ -12,8 +14,8 @@ export default function TextSizeButton() {
ref={buttonRef}
id="text-size-btn"
data-tooltip-id="tooltip-text-size-btn"
data-tooltip-content="Change text size"
aria-label="Change text size"
data-tooltip-content={t('chat_window.text_size')}
aria-label={t('chat_window.text_size')}
onClick={() => setShowTextSizeMenu(!showTextSizeMenu)}
className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 light:opacity-100 light:hover:opacity-60 cursor-pointer ${
showTextSizeMenu ? "!opacity-100" : ""

View file

@ -17,6 +17,7 @@ import AttachmentManager from "./Attachments";
import AttachItem from "./AttachItem";
import { PASTE_ATTACHMENT_EVENT } from "../DnDWrapper";
import useTextSize from "@/hooks/useTextSize";
import { useTranslation } from "react-i18next";
export const PROMPT_INPUT_EVENT = "set_prompt_input";
const MAX_EDIT_STACK_SIZE = 100;
@ -29,6 +30,7 @@ export default function PromptInput({
sendCommand,
attachments = [],
}) {
const { t } = useTranslation();
const [promptInput, setPromptInput] = useState("");
const { showAgents, setShowAgents } = useAvailableAgents();
const { showSlashCommand, setShowSlashCommand } = useSlashCommands();
@ -272,7 +274,7 @@ export default function PromptInput({
}}
value={promptInput}
className={`border-none cursor-text max-h-[50vh] md:max-h-[350px] md:min-h-[40px] mx-2 md:mx-0 pt-[12px] w-full leading-5 md:text-md text-white bg-transparent placeholder:text-white/60 light:placeholder:text-theme-text-primary resize-none active:outline-none focus:outline-none flex-grow ${textSizeClass}`}
placeholder={"Send a message"}
placeholder={t('chat_window.send_message')}
/>
{buttonDisabled ? (
<StopGenerationButton />
@ -283,8 +285,8 @@ export default function PromptInput({
type="submit"
className="border-none inline-flex justify-center rounded-2xl cursor-pointer opacity-60 hover:opacity-100 light:opacity-100 light:hover:opacity-60 ml-4"
data-tooltip-id="send-prompt"
data-tooltip-content="Send prompt message to workspace"
aria-label="Send prompt message to workspace"
data-tooltip-content={t('chat_window.send')}
aria-label={t('chat_window.send')}
>
<PaperPlaneRight
color="var(--theme-sidebar-footer-icon-fill)"

View file

@ -4,7 +4,6 @@ import { useEffect, useState } from "react";
// Providers which cannot use this feature for workspace<>model selection
export const DISABLED_PROVIDERS = [
"azure",
"native",
"textgenwebui",
"generic-openai",
"bedrock",
@ -47,7 +46,6 @@ const PROVIDER_DEFAULT_MODELS = {
fireworksai: [],
"nvidia-nim": [],
groq: [],
native: [],
cohere: [
"command-r",
"command-r-plus",

View file

@ -1,5 +1,5 @@
const TRANSLATIONS = {
common: {
common: {
"workspaces-name": "Name der Arbeitsbereiche",
error: "Fehler",
success: "Erfolg",
@ -479,6 +479,185 @@ const TRANSLATIONS = {
vector: "Vektordatenbank",
anonymous: "Anonyme Telemetrie aktiviert",
},
connectors: {
"search-placeholder": "Datenverbindungen durchsuchen",
"no-connectors": "Keine Datenverbindungen gefunden.",
github: {
name: "GitHub Repository",
description: "Importieren Sie ein öffentliches oder privates Github-Repository mit einem einzigen Klick.",
URL: "GitHub Repo URL",
URL_explained: "URL des GitHub-Repositories, das Sie sammeln möchten.",
token: "Github Zugriffstoken",
optional: "optional",
token_explained: "Zugriffstoken, um Ratenlimits zu vermeiden.",
token_explained_start: "Ohne einen ",
token_explained_link1: "persönlichen Zugriffstoken",
token_explained_middle: " kann die GitHub-API aufgrund von Ratenlimits die Anzahl der abrufbaren Dateien einschränken. Sie können ",
token_explained_link2: "einen temporären Zugriffstoken erstellen",
token_explained_end: ", um dieses Problem zu vermeiden.",
ignores: "Datei-Ausschlüsse",
git_ignore: "Liste im .gitignore-Format, um bestimmte Dateien während der Sammlung zu ignorieren. Drücken Sie Enter nach jedem Eintrag, den Sie speichern möchten.",
task_explained: "Sobald der Vorgang abgeschlossen ist, sind alle Dateien im Dokumenten-Picker zur Einbettung in Arbeitsbereiche verfügbar.",
branch: "Branch, von dem Sie Dateien sammeln möchten.",
branch_loading: "-- lade verfügbare Branches --",
branch_explained: "Branch, von dem Sie Dateien sammeln möchten.",
token_information: "Ohne Angabe des <b>Github Zugriffstokens</b> kann dieser Datenkonnektor aufgrund der öffentlichen API-Ratenlimits von GitHub nur die <b>Top-Level</b>-Dateien des Repositories sammeln.",
token_personal: "Holen Sie sich hier einen kostenlosen persönlichen Zugriffstoken mit einem GitHub-Konto."
},
gitlab: {
name: "GitLab Repository",
description: "Importieren Sie ein öffentliches oder privates GitLab-Repository mit einem einzigen Klick.",
URL: "GitLab Repo URL",
URL_explained: "URL des GitLab-Repositories, das Sie sammeln möchten.",
token: "GitLab Zugriffstoken",
optional: "optional",
token_explained: "Zugriffstoken zur Vermeidung von Ratenlimits.",
token_description: "Wählen Sie zusätzliche Entitäten aus, die von der GitLab-API abgerufen werden sollen.",
token_explained_start: "Ohne einen ",
token_explained_link1: "persönlichen Zugriffstoken",
token_explained_middle: " kann die GitLab-API aufgrund von Ratenlimits die Anzahl der abrufbaren Dateien einschränken. Sie können ",
token_explained_link2: "einen temporären Zugriffstoken erstellen",
token_explained_end: ", um dieses Problem zu vermeiden.",
fetch_issues: "Issues als Dokumente abrufen",
ignores: "Datei-Ausschlüsse",
git_ignore: "Liste im .gitignore-Format, um bestimmte Dateien während der Sammlung zu ignorieren. Drücken Sie Enter nach jedem Eintrag, den Sie speichern möchten.",
task_explained: "Sobald der Vorgang abgeschlossen ist, sind alle Dateien im Dokumenten-Picker zur Einbettung in Arbeitsbereiche verfügbar.",
branch: "Branch, von dem Sie Dateien sammeln möchten",
branch_loading: "-- lade verfügbare Branches --",
branch_explained: "Branch, von dem Sie Dateien sammeln möchten.",
token_information: "Ohne Angabe des <b>GitLab Zugriffstokens</b> kann dieser Datenkonnektor aufgrund der öffentlichen API-Ratenlimits von GitLab nur die <b>Top-Level</b>-Dateien des Repositories sammeln.",
token_personal: "Holen Sie sich hier einen kostenlosen persönlichen Zugriffstoken mit einem GitLab-Konto."
},
youtube: {
name: "YouTube Transkript",
description: "Importieren Sie die Transkription eines YouTube-Videos über einen Link.",
URL: "YouTube Video URL",
URL_explained_start: "Geben Sie die URL eines beliebigen YouTube-Videos ein, um dessen Transkript abzurufen. Das Video muss über ",
URL_explained_link: "Untertitel",
URL_explained_end: " verfügen.",
task_explained: "Sobald der Vorgang abgeschlossen ist, ist das Transkript im Dokumenten-Picker zur Einbettung in Arbeitsbereiche verfügbar.",
language: "Transkriptsprache",
language_explained: "Wählen Sie die Sprache des Transkripts aus, das Sie sammeln möchten.",
loading_languages: "-- lade verfügbare Sprachen --"
},
"website-depth": {
name: "Massen-Link-Scraper",
description: "Durchsuchen Sie eine Website und ihre Unterlinks bis zu einer bestimmten Tiefe.",
URL: "Website URL",
URL_explained: "Geben Sie die Start-URL der Website ein, die Sie durchsuchen möchten.",
depth: "Durchsuchungstiefe",
depth_explained: "Dies ist die Anzahl der Unterseiten, die ausgehend von der ursprünglichen URL durchsucht werden sollen.",
max_pages: "Maximale Seitenanzahl",
max_pages_explained: "Maximale Anzahl der zu durchsuchenden Seiten.",
task_explained: "Sobald der Vorgang abgeschlossen ist, sind alle gesammelten Inhalte im Dokumenten-Picker zur Einbettung in Arbeitsbereiche verfügbar."
},
confluence: {
name: "Confluence",
description: "Importieren Sie eine komplette Confluence-Seite mit einem einzigen Klick.",
deployment_type: "Confluence Bereitstellungstyp",
deployment_type_explained: "Bestimmen Sie, ob Ihre Confluence-Instanz in der Atlassian Cloud oder selbst gehostet ist.",
base_url: "Confluence Basis-URL",
base_url_explained: "Dies ist die Basis-URL Ihres Confluence-Bereichs.",
space_key: "Confluence Space-Key",
space_key_explained: "Dies ist der Space-Key Ihrer Confluence-Instanz, der verwendet wird. Beginnt normalerweise mit ~",
username: "Confluence Benutzername",
username_explained: "Ihr Confluence Benutzername.",
token: "Confluence API-Token",
token_explained_start: "Ein ",
token_explained_link1: "persönlicher API-Token",
token_explained_middle: " ist erforderlich, um auf Confluence-Seiten zuzugreifen. Sie können ",
token_explained_link2: "hier einen API-Token erstellen",
token_explained_end: ".",
token_desc: "Zugriffstoken für die Authentifizierung.",
task_explained: "Sobald der Vorgang abgeschlossen ist, ist der Seiteninhalt im Dokumenten-Picker zur Einbettung in Arbeitsbereiche verfügbar."
},
manage: {
documents: "Dokumente",
"data-connectors": "Datenverbindungen",
"desktop-only": "Diese Einstellungen können nur auf einem Desktop-Gerät bearbeitet werden. Bitte rufen Sie diese Seite auf Ihrem Desktop auf, um fortzufahren.",
dismiss: "Schließen",
editing: "Bearbeite",
},
directory: {
"my-documents": "Meine Dokumente",
"new-folder": "Neuer Ordner",
"search-document": "Dokument suchen",
"no-documents": "Keine Dokumente",
"move-workspace": "In Arbeitsbereich verschieben",
name: "Name",
"delete-confirmation": "Sind Sie sicher, dass Sie diese Dateien und Ordner löschen möchten?\nDies wird die Dateien vom System entfernen und sie automatisch aus allen vorhandenen Arbeitsbereichen entfernen.\nDiese Aktion kann nicht rückgängig gemacht werden.",
"removing-message": "Entferne {{count}} Dokumente und {{folderCount}} Ordner. Bitte warten.",
"move-success": "{{count}} Dokumente erfolgreich verschoben.",
date: "Datum",
type: "Typ",
select_all: "Alle auswählen",
deselect_all: "Auswahl aufheben",
no_docs: "Keine Dokumente vorhanden.",
remove_selected: "Ausgewähltes entfernen",
costs: "*Einmalige Kosten für das Einbetten",
save_embed: "Speichern und Einbetten"
},
upload: {
"processor-offline": "Dokumentenprozessor nicht verfügbar",
"processor-offline-desc": "Wir können Ihre Dateien momentan nicht hochladen, da der Dokumentenprozessor offline ist. Bitte versuchen Sie es später erneut.",
"click-upload": "Klicken Sie zum Hochladen oder ziehen Sie Dateien per Drag & Drop",
"file-types": "unterstützt Textdateien, CSVs, Tabellenkalkulationen, Audiodateien und mehr!",
"or-submit-link": "oder einen Link einreichen",
"placeholder-link": "https://beispiel.de",
"fetching": "Wird abgerufen...",
"fetch-website": "Website abrufen",
"privacy-notice": "Diese Dateien werden zum Dokumentenprozessor hochgeladen, der auf dieser AnythingLLM-Instanz läuft. Diese Dateien werden nicht an Dritte gesendet oder geteilt.",
},
pinning: {
what_pinning: "Was bedeutet es Dokumente anzuheften?",
pin_explained_block1: "Wenn du ein Dokument <b>anheftest</b>, wird der komplette Inhalt des Dokuments mit deinem Prompt versendet, wodurch das LLM den vollen Kontext besitzt.",
pin_explained_block2: "Das funktioniert am besten mit <b>Modellen mit großem Kontextfenster</b> oder bei kleinen Dokumenten, deren Inhalt für die Wissensbasis entscheidend ist.",
pin_explained_block3: "Wenn du standardmäßig nicht die gewünschten Ergebnisse bekommst, kann das Anheften eine gute Methode sein, um mit nur einem Klick Antworten in besserer Qualität zu erhalten.",
accept: "Alles klar, ich habe es verstanden."
},
watching: {
what_watching: "Was bedeutet es ein Dokument zu beobachten?",
watch_explained_block1: "Wenn du ein Dokument <b>beobachtest</b>, wird das Dokument in regelmäßigen Abständen <i>automatisch</i> aus seiner Datenquelle aktualisiert. Dadurch wird der Inhalt automatisch in allen Arbeitsbereichen aktualisiert, in denen sich das Dokument befindet.",
watch_explained_block2: "Diese Funktion unterstützt aktuell nur Online-Quellen und ist somit nicht für selbst hochgeladene Dokumente verfügbar.",
watch_explained_block3_start: "Du kannst im ",
watch_explained_block3_link: "Dateimanager",
watch_explained_block3_end: " entscheiden, welche Dokumente du beobachten möchtest.",
accept: "Alles klar, ich habe es verstanden."
}
},
chat_window: {
welcome: "Willkommen in deinem neuen Arbeitsbereich.",
get_started: "Starte mit ",
get_started_default: "Um loszulegen,",
upload: "dem Upload von Dokumenten",
or: " oder ",
send_chat: " schreibe im Chat.",
send_message: "Schreibe eine Nachricht",
attach_file: "Füge eine Datei zum Chat hinzu",
slash: "Schau dir alle verfügbaren Slash-Befehle für den Chat an.",
agents: "Schau dir alle verfügbaren Agentenfähigkeiten für den Chat an.",
text_size: "Ändere die Größe des Textes.",
microphone: "Sprich deinen Prompt ein.",
send: "Versende den Prompt an den Arbeitsbereich.",
},
profile_settings:{
edit_account: "Account bearbeiten",
profile_picture: "Profilbild",
remove_profile_picture: "Profilbild entfernen",
username: "Nutzername",
username_description: "Der Nutzername darf nur Kleinbuchstaben, Zahlen, Unterstriche und Bindestriche ohne Leerzeichen enthalten.",
new_password: "Neues Passwort",
passwort_description: "Das Passwort muss mindestens 8 Zeichen haben.",
cancel: "Abbrechen",
update_account: "Account updaten",
theme: "Bevozugtes Design",
language: "Bevorzugte Sprache",
},
};
export default TRANSLATIONS;
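Keys such as directory.removing-message and directory.move-success carry {{count}} and {{folderCount}} placeholders; these are i18next interpolation slots filled at call time. A small usage sketch, assuming the key path mirrors the object nesting above:

// Hypothetical call site for the German strings above.
import i18next from "i18next";

i18next.t("connectors.directory.removing-message", { count: 3, folderCount: 1 });
// -> "Entferne 3 Dokumente und 1 Ordner. Bitte warten."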

View file

@ -490,6 +490,184 @@ const TRANSLATIONS = {
vector: "Vector Database",
anonymous: "Anonymous Telemetry Enabled",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "URL of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;
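Several of these strings (token_information, pin_explained_block1, watch_explained_block1) embed <b>/<i> markup. One common way to render such strings safely with react-i18next is the Trans component; whether the components in this PR use Trans or another mechanism is not shown here, so treat this as an assumption:

// Sketch only: map the <b> tag inside the translation string to a real element at render time.
import { Trans } from "react-i18next";

function GithubTokenNotice() {
  return (
    <Trans
      i18nKey="connectors.github.token_information"
      components={{ b: <b /> }}
    />
  );
}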

View file

@ -482,6 +482,184 @@ const TRANSLATIONS = {
vector: "Base de datos de vectores",
anonymous: "Telemetría anónima habilitada",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "URL of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;

View file

@ -486,6 +486,184 @@ const TRANSLATIONS = {
vector: "پایگاه داده برداری",
anonymous: "ارسال تله‌متری ناشناس فعال است",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "URL of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;

View file

@ -498,6 +498,184 @@ const TRANSLATIONS = {
vector: "Base de données vectorielle",
anonymous: "Télémétrie anonyme activée",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "URL of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;
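For orientation, the split keys above (for example `token_explained_start`, `token_explained_link1`, `token_explained_end`) and the `{{count}}`/`{{folderCount}}` placeholders are the kind of strings a react-i18next component would stitch together at render time. The sketch below is illustrative only: it assumes react-i18next, a `connectors.*` key path at the top level of `TRANSLATIONS`, and a hypothetical component, link URL, and props that are not taken from this diff.

```jsx
// Hypothetical consumer of the keys added above (assumes react-i18next;
// the component name, props, href, and exact key paths are illustrative).
import { useTranslation } from "react-i18next";

export default function GithubConnectorHint({ removedFiles, removedFolders }) {
  const { t } = useTranslation();
  return (
    <div>
      {/* The segmented strings are concatenated around the link element */}
      <p>
        {t("connectors.github.token_explained_start")}
        <a href="https://github.com/settings/tokens">
          {t("connectors.github.token_explained_link1")}
        </a>
        {t("connectors.github.token_explained_middle")}
        <a href="https://github.com/settings/tokens">
          {t("connectors.github.token_explained_link2")}
        </a>
        {t("connectors.github.token_explained_end")}
      </p>
      {/* {{count}} and {{folderCount}} are filled via interpolation options */}
      <p>
        {t("connectors.directory.removing-message", {
          count: removedFiles.length,
          folderCount: removedFolders.length,
        })}
      </p>
    </div>
  );
}
```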


@@ -480,6 +480,184 @@ const TRANSLATIONS = {
vector: "בסיס נתונים וקטור",
anonymous: "טלמטריה אנונימית מופעלת",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;
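The same English block is copied verbatim into each locale file in this changeset; the following hunks repeat it for several other locale bundles. Below is a minimal sketch of the i18next fallback behaviour that such duplication sidesteps, assuming i18next/react-i18next is the loader; the file paths, locale codes, and `fallbackLng` setting are illustrative and not read from this PR.

```js
// Minimal sketch: untranslated keys could also resolve via fallback instead of
// duplicating the English strings in every locale file (assumes i18next).
import i18next from "i18next";
import English from "./en/common.js"; // illustrative paths
import Hebrew from "./he/common.js";

i18next.init({
  lng: "he",
  fallbackLng: "en", // any key missing from the Hebrew bundle resolves from English
  resources: {
    en: { translation: English },
    he: { translation: Hebrew },
  },
});

// Resolves locally now that the English block is copied into the Hebrew file;
// without the copy it would fall back to the "en" bundle instead.
console.log(i18next.t("connectors.github.token_explained_link1"));
```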


@@ -495,6 +495,184 @@ const TRANSLATIONS = {
vector: "Database vettoriale",
anonymous: "Telemetria anonima abilitata",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;


@@ -480,6 +480,184 @@ const TRANSLATIONS = {
vector: "벡터 데이터베이스",
anonymous: "익명 원격 분석 활성화",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;


@@ -491,6 +491,184 @@ const TRANSLATIONS = {
vector: "Vector Database",
anonymous: "Anonieme Telemetrie Ingeschakeld",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;


@@ -493,6 +493,184 @@ const TRANSLATIONS = {
vector: "Banco de Dados Vetorial",
anonymous: "Telemetria Anônima Ativada",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;


@@ -460,6 +460,184 @@ const TRANSLATIONS = {
vector: "Векторная база данных",
anonymous: "Анонимная телеметрия включена",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from it's original source on regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must be only contain lowercase letters, numbers, underscores, and hyphens with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;


@@ -489,6 +489,184 @@ const TRANSLATIONS = {
vector: "Cơ sở dữ liệu Vector",
anonymous: "Anonymous Telemetry Enabled",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>Github Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the spaces key of your confluence instance that will be used. Usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source at regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens, with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;

View file

@ -463,6 +463,184 @@ const TRANSLATIONS = {
vector: "向量数据库",
anonymous: "启用匿名遥测",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitHub Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. It usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source at regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens, with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;

View file

@ -466,6 +466,184 @@ const TRANSLATIONS = {
vector: "向量資料庫",
anonymous: "已啟用匿名統計資訊",
},
connectors: {
"search-placeholder": "Search data connectors",
"no-connectors": "No data connectors found.",
github: {
name: "GitHub Repo",
description: "Import an entire public or private Github repository in a single click.",
URL: "GitHub Repo URL",
URL_explained: "Url of the GitHub repo you wish to collect.",
token: "Github Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitHub API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from.",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitHub Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitHub's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitHub account here."
},
gitlab: {
name: "GitLab Repo",
description: "Import an entire public or private GitLab repository in a single click.",
URL: "GitLab Repo URL",
URL_explained: "URL of the GitLab repo you wish to collect.",
token: "GitLab Access Token",
optional: "optional",
token_explained: "Access Token to prevent rate limiting.",
token_description: "Select additional entities to fetch from the GitLab API.",
token_explained_start: "Without a ",
token_explained_link1: "Personal Access Token",
token_explained_middle: ", the GitLab API may limit the number of files that can be collected due to rate limits. You can ",
token_explained_link2: "create a temporary Access Token",
token_explained_end: " to avoid this issue.",
fetch_issues: "Fetch Issues as Documents",
ignores: "File Ignores",
git_ignore: "List in .gitignore format to ignore specific files during collection. Press enter after each entry you want to save.",
task_explained: "Once complete, all files will be available for embedding into workspaces in the document picker.",
branch: "Branch you wish to collect files from",
branch_loading: "-- loading available branches --",
branch_explained: "Branch you wish to collect files from.",
token_information: "Without filling out the <b>GitLab Access Token</b> this data connector will only be able to collect the <b>top-level</b> files of the repo due to GitLab's public API rate-limits.",
token_personal: "Get a free Personal Access Token with a GitLab account here."
},
youtube: {
name: "YouTube Transcript",
description: "Import the transcription of an entire YouTube video from a link.",
URL: "YouTube Video URL",
URL_explained_start: "Enter the URL of any YouTube video to fetch its transcript. The video must have ",
URL_explained_link: "closed captions",
URL_explained_end: " available.",
task_explained: "Once complete, the transcript will be available for embedding into workspaces in the document picker.",
language: "Transcript Language",
language_explained: "Select the language of the transcript you want to collect.",
loading_languages: "-- loading available languages --"
},
"website-depth": {
name: "Bulk Link Scraper",
description: "Scrape a website and its sub-links up to a certain depth.",
URL: "Website URL",
URL_explained: "URL of the website you want to scrape.",
depth: "Crawl Depth",
depth_explained: "This is the number of child-links that the worker should follow from the origin URL.",
max_pages: "Maximum Pages",
max_pages_explained: "Maximum number of links to scrape.",
task_explained: "Once complete, all scraped content will be available for embedding into workspaces in the document picker."
},
confluence: {
name: "Confluence",
description: "Import an entire Confluence page in a single click.",
deployment_type: "Confluence deployment type",
deployment_type_explained: "Determine if your Confluence instance is hosted on Atlassian cloud or self-hosted.",
base_url: "Confluence base URL",
base_url_explained: "This is the base URL of your Confluence space.",
space_key: "Confluence space key",
space_key_explained: "This is the space key of your Confluence instance that will be used. It usually begins with ~",
username: "Confluence Username",
username_explained: "Your Confluence username.",
token: "Confluence API Token",
token_explained_start: "A ",
token_explained_link1: "Personal API Token",
token_explained_middle: " is required to access Confluence pages. You can ",
token_explained_link2: "create an API Token here",
token_explained_end: ".",
token_desc: "Access token for authentication.",
task_explained: "Once complete, the page content will be available for embedding into workspaces in the document picker."
},
manage: {
documents: "Documents",
"data-connectors": "Data Connectors",
"desktop-only": "Editing these settings are only available on a desktop device. Please access this page on your desktop to continue.",
dismiss: "Dismiss",
editing: "Editing",
},
directory: {
"my-documents": "My Documents",
"new-folder": "New Folder",
"search-document": "Search for document",
"no-documents": "No Documents",
"move-workspace": "Move to Workspace",
name: "Name",
"delete-confirmation": "Are you sure you want to delete these files and folders?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible.",
"removing-message": "Removing {{count}} documents and {{folderCount}} folders. Please wait.",
"move-success": "Successfully moved {{count}} documents.",
date: "Date",
type: "Type",
no_docs: "No Documents",
select_all:"Select All",
deselect_all:"Deselect All",
remove_selected: "Remove Selected",
costs: "*One time cost for embeddings",
save_embed: "Save and Embed",
},
upload: {
"processor-offline": "Document Processor Unavailable",
"processor-offline-desc": "We can't upload your files right now because the document processor is offline. Please try again later.",
"click-upload": "Click to upload or drag and drop",
"file-types": "supports text files, csv's, spreadsheets, audio files, and more!",
"or-submit-link": "or submit a link",
"placeholder-link": "https://example.com",
"fetching": "Fetching...",
"fetch-website": "Fetch website",
"privacy-notice": "These files will be uploaded to the document processor running on this AnythingLLM instance. These files are not sent or shared with a third party.",
},
pinning: {
what_pinning: "What is document pinning?",
pin_explained_block1: "When you <b>pin</b> a document in AnythingLLM we will inject the entire content of the document into your prompt window for your LLM to fully comprehend.",
pin_explained_block2: "This works best with <b>large-context models</b> or small files that are critical to its knowledge-base.",
pin_explained_block3: "If you are not getting the answers you desire from AnythingLLM by default then pinning is a great way to get higher quality answers in a click.",
accept: "Okay, got it"
},
watching: {
what_watching: "What does watching a document do?",
watch_explained_block1: "When you <b>watch</b> a document in AnythingLLM we will <i>automatically</i> sync your document content from its original source at regular intervals. This will automatically update the content in every workspace where this file is managed.",
watch_explained_block2: "This feature currently supports online-based content and will not be available for manually uploaded documents.",
watch_explained_block3_start: "You can manage what documents are watched from the ",
watch_explained_block3_link: "File manager",
watch_explained_block3_end: " admin view.",
accept: "Okay, got it"
}
},
chat_window:{
welcome: "Welcome to your new workspace.",
get_started: "To get started either",
get_started_default: "To get started",
upload: "upload a document",
or: "or",
send_chat: "send a chat.",
send_message: "Send a message",
attach_file: "Attach a file to this chat",
slash: "View all available slash commands for chatting.",
agents: "View all available agents you can use for chatting.",
text_size: "Change text size.",
microphone: "Speak your prompt.",
send: "Send prompt message to workspace",
},
profile_settings:{
edit_account: "Edit Account",
profile_picture: "Profile Picture",
remove_profile_picture: "Remove Profile Picture",
username: "Username",
username_description: "Username must only contain lowercase letters, numbers, underscores, and hyphens, with no spaces",
new_password: "New Password",
passwort_description: "Password must be at least 8 characters long",
cancel: "Cancel",
update_account: "Update Account",
theme: "Theme Preference",
language: "Preferred language",
},
};
export default TRANSLATIONS;

Binary file not shown.

Before: 38 KiB

After: 2.7 KiB

Binary file not shown.

Before: 64 KiB

After: 25 KiB

View file

@ -1,7 +1,15 @@
import React from "react";
import { DefaultBadge } from "../Badges/default";
export default function DefaultSkillPanel({ title, description, image, icon }) {
export default function DefaultSkillPanel({
title,
description,
image,
icon,
enabled = true,
toggleSkill,
skill,
}) {
return (
<div className="p-2">
<div className="flex flex-col gap-y-[18px] max-w-[500px]">
@ -21,10 +29,26 @@ export default function DefaultSkillPanel({ title, description, image, icon }) {
</label>
<DefaultBadge title={title} />
</div>
<label
className={`border-none relative inline-flex items-center ml-auto cursor-pointer`}
>
<input
type="checkbox"
className="peer sr-only"
checked={enabled}
onChange={() => toggleSkill(skill)}
/>
<div className="peer-disabled:opacity-50 pointer-events-none peer h-6 w-11 rounded-full bg-[#CFCFD0] after:absolute after:left-[2px] after:top-[2px] after:h-5 after:w-5 after:rounded-full after:shadow-xl after:border-none after:bg-white after:box-shadow-md after:transition-all after:content-[''] peer-checked:bg-[#32D583] peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-transparent"></div>
<span className="ml-3 text-sm font-medium"></span>
</label>
</div>
<img src={image} alt={title} className="w-full rounded-md" />
<p className="text-theme-text-secondary text-opacity-60 text-xs font-medium py-1.5">
{description}
<br />
<br />
By default, this skill is enabled, but you can disable it if you don't
want it to be available to the agent.
</p>
</div>
</div>
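
DefaultSkillPanel now takes enabled, toggleSkill, and skill so the built-in skills can be switched off. A minimal, hypothetical parent rendering of the updated panel — prop names come from this diff, while the import paths, asset, and surrounding state are assumptions — could look like:

// Sketch only: the state shape mirrors the AdminAgents diff below; asset and import paths are hypothetical.
import React, { useState } from "react";
import { Browser } from "@phosphor-icons/react";
import DefaultSkillPanel from "./DefaultSkillPanel";
import ScrapeWebsitesImage from "@/media/agents/scrape-websites.png"; // hypothetical path

function WebScrapingSkill() {
  const [disabledAgentSkills, setDisabledAgentSkills] = useState([]);
  const toggleDefaultSkill = (skillName) =>
    setDisabledAgentSkills((prev) =>
      prev.includes(skillName)
        ? prev.filter((name) => name !== skillName)
        : [...prev, skillName]
    );

  return (
    <DefaultSkillPanel
      title="Scrape websites"
      description="Allow the agent to visit and scrape the content of websites."
      icon={Browser}
      image={ScrapeWebsitesImage}
      skill="web-scraping"
      enabled={!disabledAgentSkills.includes("web-scraping")}
      toggleSkill={toggleDefaultSkill}
    />
  );
}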

View file

@ -15,14 +15,16 @@ import ImportedSkillConfig from "./Imported/ImportedSkillConfig";
import { Tooltip } from "react-tooltip";
export default function AdminAgents() {
const formEl = useRef(null);
const [hasChanges, setHasChanges] = useState(false);
const [settings, setSettings] = useState({});
const [selectedSkill, setSelectedSkill] = useState("");
const [agentSkills, setAgentSkills] = useState([]);
const [importedSkills, setImportedSkills] = useState([]);
const [loading, setLoading] = useState(true);
const [showSkillModal, setShowSkillModal] = useState(false);
const formEl = useRef(null);
const [agentSkills, setAgentSkills] = useState([]);
const [importedSkills, setImportedSkills] = useState([]);
const [disabledAgentSkills, setDisabledAgentSkills] = useState([]);
// Alert user if they try to leave the page with unsaved changes
useEffect(() => {
@ -42,17 +44,31 @@ export default function AdminAgents() {
async function fetchSettings() {
const _settings = await System.keys();
const _preferences = await Admin.systemPreferencesByFields([
"disabled_agent_skills",
"default_agent_skills",
"imported_agent_skills",
]);
setSettings({ ..._settings, preferences: _preferences.settings } ?? {});
setAgentSkills(_preferences.settings?.default_agent_skills ?? []);
setDisabledAgentSkills(
_preferences.settings?.disabled_agent_skills ?? []
);
setImportedSkills(_preferences.settings?.imported_agent_skills ?? []);
setLoading(false);
}
fetchSettings();
}, []);
const toggleDefaultSkill = (skillName) => {
setDisabledAgentSkills((prev) => {
const updatedSkills = prev.includes(skillName)
? prev.filter((name) => name !== skillName)
: [...prev, skillName];
setHasChanges(true);
return updatedSkills;
});
};
const toggleAgentSkill = (skillName) => {
setAgentSkills((prev) => {
const updatedSkills = prev.includes(skillName)
@ -93,11 +109,15 @@ export default function AdminAgents() {
if (success) {
const _settings = await System.keys();
const _preferences = await Admin.systemPreferencesByFields([
"disabled_agent_skills",
"default_agent_skills",
"imported_agent_skills",
]);
setSettings({ ..._settings, preferences: _preferences.settings } ?? {});
setAgentSkills(_preferences.settings?.default_agent_skills ?? []);
setDisabledAgentSkills(
_preferences.settings?.disabled_agent_skills ?? []
);
setImportedSkills(_preferences.settings?.imported_agent_skills ?? []);
showToast(`Agent preferences saved successfully.`, "success", {
clear: true,
@ -143,6 +163,11 @@ export default function AdminAgents() {
type="hidden"
value={agentSkills.join(",")}
/>
<input
name="system::disabled_agent_skills"
type="hidden"
value={disabledAgentSkills.join(",")}
/>
{/* Skill settings nav */}
<div hidden={showSkillModal} className="flex flex-col gap-y-[18px]">
@ -152,13 +177,15 @@ export default function AdminAgents() {
</div>
{/* Default skills */}
<SkillList
isDefault={true}
skills={defaultSkills}
selectedSkill={selectedSkill}
handleClick={(skill) => {
setSelectedSkill(skill);
setShowSkillModal(true);
}}
activeSkills={Object.keys(defaultSkills).filter(
(skill) => !disabledAgentSkills.includes(skill)
)}
/>
{/* Configurable skills */}
<SkillList
@ -212,17 +239,35 @@ export default function AdminAgents() {
setImportedSkills={setImportedSkills}
/>
) : (
<SelectedSkillComponent
skill={configurableSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleAgentSkill}
enabled={agentSkills.includes(
configurableSkills[selectedSkill]?.skill
<>
{defaultSkills?.[selectedSkill] ? (
// The selected skill is a default skill - show the default skill panel
<SelectedSkillComponent
skill={defaultSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleDefaultSkill}
enabled={
!disabledAgentSkills.includes(
defaultSkills[selectedSkill]?.skill
)
}
setHasChanges={setHasChanges}
{...defaultSkills[selectedSkill]}
/>
) : (
// The selected skill is a configurable skill - show the configurable skill panel
<SelectedSkillComponent
skill={configurableSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleAgentSkill}
enabled={agentSkills.includes(
configurableSkills[selectedSkill]?.skill
)}
setHasChanges={setHasChanges}
{...configurableSkills[selectedSkill]}
/>
)}
setHasChanges={setHasChanges}
{...(configurableSkills[selectedSkill] ||
defaultSkills[selectedSkill])}
/>
</>
)}
</>
) : (
@ -258,6 +303,11 @@ export default function AdminAgents() {
type="hidden"
value={agentSkills.join(",")}
/>
<input
name="system::disabled_agent_skills"
type="hidden"
value={disabledAgentSkills.join(",")}
/>
{/* Skill settings nav */}
<div className="flex flex-col gap-y-[18px]">
@ -268,10 +318,12 @@ export default function AdminAgents() {
{/* Default skills list */}
<SkillList
isDefault={true}
skills={defaultSkills}
selectedSkill={selectedSkill}
handleClick={setSelectedSkill}
activeSkills={Object.keys(defaultSkills).filter(
(skill) => !disabledAgentSkills.includes(skill)
)}
/>
{/* Configurable skills */}
<SkillList
@ -304,17 +356,35 @@ export default function AdminAgents() {
setImportedSkills={setImportedSkills}
/>
) : (
<SelectedSkillComponent
skill={configurableSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleAgentSkill}
enabled={agentSkills.includes(
configurableSkills[selectedSkill]?.skill
<>
{defaultSkills?.[selectedSkill] ? (
// The selected skill is a default skill - show the default skill panel
<SelectedSkillComponent
skill={defaultSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleDefaultSkill}
enabled={
!disabledAgentSkills.includes(
defaultSkills[selectedSkill]?.skill
)
}
setHasChanges={setHasChanges}
{...defaultSkills[selectedSkill]}
/>
) : (
// The selected skill is a configurable skill - show the configurable skill panel
<SelectedSkillComponent
skill={configurableSkills[selectedSkill]?.skill}
settings={settings}
toggleSkill={toggleAgentSkill}
enabled={agentSkills.includes(
configurableSkills[selectedSkill]?.skill
)}
setHasChanges={setHasChanges}
{...configurableSkills[selectedSkill]}
/>
)}
setHasChanges={setHasChanges}
{...(configurableSkills[selectedSkill] ||
defaultSkills[selectedSkill])}
/>
</>
)}
</>
) : (
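
The disabled list rides along with the existing preferences: on save it is serialized as a comma-separated string through the hidden input named system::disabled_agent_skills, and on load it comes back as an array from Admin.systemPreferencesByFields. A condensed sketch of that round trip, using only the calls already present in this diff (server-side parsing of the comma list is assumed to mirror the existing default_agent_skills handling):

// Write path: the hidden input
//   <input name="system::disabled_agent_skills" type="hidden" value={disabledAgentSkills.join(",")} />
// is submitted with the rest of the agent settings form.

// Read path, condensed from fetchSettings above. Admin is the model already imported by this file.
async function loadDisabledSkills() {
  const prefs = await Admin.systemPreferencesByFields(["disabled_agent_skills"]);
  return prefs.settings?.disabled_agent_skills ?? [];
}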

View file

@ -23,21 +23,24 @@ export const defaultSkills = {
component: DefaultSkillPanel,
icon: Brain,
image: RAGImage,
skill: "rag-memory",
},
"view-summarize": {
"document-summarizer": {
title: "View & summarize documents",
description:
"Allow the agent to list and summarize the content of workspace files currently embedded.",
component: DefaultSkillPanel,
icon: File,
image: SummarizeImage,
skill: "document-summarizer",
},
"scrape-websites": {
"web-scraping": {
title: "Scrape websites",
description: "Allow the agent to visit and scrape the content of websites.",
component: DefaultSkillPanel,
icon: Browser,
image: ScrapeWebsitesImage,
skill: "web-scraping",
},
};
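
The object keys now match the skill identifiers used elsewhere (rag-memory, document-summarizer, web-scraping), and each entry carries that identifier in its skill field. That alignment is what lets the admin page derive a default skill's toggle state directly; a sketch, with the variables taken from the AdminAgents page state above:

// Sketch: selectedSkill is one of the keys above, e.g. "web-scraping";
// defaultSkills and disabledAgentSkills come from the admin page state.
const entry = defaultSkills[selectedSkill];
const enabled = entry ? !disabledAgentSkills.includes(entry.skill) : false;
const activeSkills = Object.keys(defaultSkills).filter(
  (skill) => !disabledAgentSkills.includes(skill)
);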

View file

@ -38,7 +38,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import NovitaLLMOptions from "@/components/LLMSelection/NovitaLLMOptions";
@ -97,12 +96,12 @@ export const AVAILABLE_LLM_PROVIDERS = [
requiredConfig: ["GeminiLLMApiKey"],
},
{
name: "Nvidia NIM",
name: "NVIDIA NIM",
value: "nvidia-nim",
logo: NvidiaNimLogo,
options: (settings) => <NvidiaNimOptions settings={settings} />,
description:
"Run full parameter LLMs directly on your GPU using Nvidia's inference microservice via Docker.",
"Run full parameter LLMs directly on your NVIDIA RTX GPU using NVIDIA NIM.",
requiredConfig: ["NvidiaNimLLMBasePath"],
},
{
@ -290,16 +289,6 @@ export const AVAILABLE_LLM_PROVIDERS = [
description: "Run xAI's powerful LLMs like Grok-2 and more.",
requiredConfig: ["XAIApiKey", "XAIModelPref"],
},
{
name: "Native",
value: "native",
logo: AnythingLLMIcon,
options: (settings) => <NativeLLMOptions settings={settings} />,
description:
"Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
requiredConfig: [],
},
];
export default function GeneralLLMPreference() {
@ -312,7 +301,6 @@ export default function GeneralLLMPreference() {
const [selectedLLM, setSelectedLLM] = useState(null);
const [searchMenuOpen, setSearchMenuOpen] = useState(false);
const searchInputRef = useRef(null);
const isHosted = window.location.hostname.includes("useanything.com");
const { t } = useTranslation();
const handleSubmit = async (e) => {
@ -449,7 +437,6 @@ export default function GeneralLLMPreference() {
</div>
<div className="flex-1 pl-4 pr-2 flex flex-col gap-y-1 overflow-y-auto white-scrollbar pb-4">
{filteredLLMs.map((llm) => {
if (llm.value === "native" && isHosted) return null;
return (
<LLMItem
key={llm.name}

View file

@ -8,6 +8,7 @@ import {
EMBEDDING_ENGINE_PRIVACY,
LLM_SELECTION_PRIVACY,
VECTOR_DB_PRIVACY,
FALLBACKS,
} from "@/pages/OnboardingFlow/Steps/DataHandling";
import { useTranslation } from "react-i18next";
@ -67,6 +68,13 @@ function ThirdParty({ settings }) {
const vectorDb = settings?.VectorDB || "lancedb";
const { t } = useTranslation();
const LLMSelection =
LLM_SELECTION_PRIVACY?.[llmChoice] || FALLBACKS.LLM(llmChoice);
const EmbeddingEngine =
EMBEDDING_ENGINE_PRIVACY?.[embeddingEngine] ||
FALLBACKS.EMBEDDING(embeddingEngine);
const VectorDb = VECTOR_DB_PRIVACY?.[vectorDb] || FALLBACKS.VECTOR(vectorDb);
return (
<div className="py-8 w-full flex items-start justify-center flex-col gap-y-6 border-b-2 border-theme-sidebar-border">
<div className="flex flex-col gap-8">
@ -76,16 +84,16 @@ function ThirdParty({ settings }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={LLM_SELECTION_PRIVACY[llmChoice].logo}
src={LLMSelection.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{LLM_SELECTION_PRIVACY[llmChoice].name}
{LLMSelection.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{LLM_SELECTION_PRIVACY[llmChoice].description.map((desc) => (
{LLMSelection.description.map((desc) => (
<li className="text-theme-text-secondary text-sm">{desc}</li>
))}
</ul>
@ -96,20 +104,18 @@ function ThirdParty({ settings }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={EMBEDDING_ENGINE_PRIVACY[embeddingEngine].logo}
src={EmbeddingEngine.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{EMBEDDING_ENGINE_PRIVACY[embeddingEngine].name}
{EmbeddingEngine.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{EMBEDDING_ENGINE_PRIVACY[embeddingEngine].description.map(
(desc) => (
<li className="text-theme-text-secondary text-sm">{desc}</li>
)
)}
{EmbeddingEngine.description.map((desc) => (
<li className="text-theme-text-secondary text-sm">{desc}</li>
))}
</ul>
</div>
@ -119,16 +125,16 @@ function ThirdParty({ settings }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={VECTOR_DB_PRIVACY[vectorDb].logo}
src={VectorDb.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{VECTOR_DB_PRIVACY[vectorDb].name}
{VectorDb.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{VECTOR_DB_PRIVACY[vectorDb].description.map((desc) => (
{VectorDb.description.map((desc) => (
<li className="text-theme-text-secondary text-sm">{desc}</li>
))}
</ul>
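
The FALLBACKS helpers (added in the DataHandling diff below) return a generic "Unknown" privacy card for any provider string that has no entry, so this page and the onboarding step no longer crash on an unrecognized provider. For example, with an illustrative provider id:

// Illustrative provider id; the lookup pattern is the one used in this diff.
const llmChoice = "some-new-provider";
const LLMSelection =
  LLM_SELECTION_PRIVACY?.[llmChoice] || FALLBACKS.LLM(llmChoice);
// LLMSelection.name        -> "Unknown"
// LLMSelection.description -> ['"some-new-provider" has no known data handling policy defined in AnythingLLM']
// LLMSelection.logo        -> AnythingLLMIcon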

View file

@ -78,9 +78,9 @@ export const LLM_SELECTION_PRIVACY = {
logo: GeminiLogo,
},
"nvidia-nim": {
name: "Nvidia NIM",
name: "NVIDIA NIM",
description: [
"Your model and chats are only accessible on the machine running the Nvidia NIM service",
"Your model and chats are only accessible on the machine running the NVIDIA NIM",
],
logo: NvidiaNimLogo,
},
@ -105,13 +105,6 @@ export const LLM_SELECTION_PRIVACY = {
],
logo: OllamaLogo,
},
native: {
name: "Custom Llama Model",
description: [
"Your model and chats are only accessible on this AnythingLLM instance",
],
logo: AnythingLLMIcon,
},
togetherai: {
name: "TogetherAI",
description: [
@ -388,6 +381,30 @@ export const EMBEDDING_ENGINE_PRIVACY = {
},
};
export const FALLBACKS = {
LLM: (provider) => ({
name: "Unknown",
description: [
`"${provider}" has no known data handling policy defined in AnythingLLM`,
],
logo: AnythingLLMIcon,
}),
EMBEDDING: (provider) => ({
name: "Unknown",
description: [
`"${provider}" has no known data handling policy defined in AnythingLLM`,
],
logo: AnythingLLMIcon,
}),
VECTOR: (provider) => ({
name: "Unknown",
description: [
`"${provider}" has no known data handling policy defined in AnythingLLM`,
],
logo: AnythingLLMIcon,
}),
};
export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
const [llmChoice, setLLMChoice] = useState("openai");
const [loading, setLoading] = useState(true);
@ -425,6 +442,13 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
</div>
);
const LLMSelection =
LLM_SELECTION_PRIVACY?.[llmChoice] || FALLBACKS.LLM(llmChoice);
const EmbeddingEngine =
EMBEDDING_ENGINE_PRIVACY?.[embeddingEngine] ||
FALLBACKS.EMBEDDING(embeddingEngine);
const VectorDb = VECTOR_DB_PRIVACY?.[vectorDb] || FALLBACKS.VECTOR(vectorDb);
return (
<div className="w-full flex items-center justify-center flex-col gap-y-6">
<div className="p-8 flex flex-col gap-8">
@ -434,16 +458,16 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={LLM_SELECTION_PRIVACY[llmChoice].logo}
src={LLMSelection.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{LLM_SELECTION_PRIVACY[llmChoice].name}
{LLMSelection.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{LLM_SELECTION_PRIVACY[llmChoice].description.map((desc) => (
{LLMSelection.description.map((desc) => (
<li className="text-theme-text-primary text-sm">{desc}</li>
))}
</ul>
@ -454,20 +478,18 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={EMBEDDING_ENGINE_PRIVACY[embeddingEngine].logo}
src={EmbeddingEngine.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{EMBEDDING_ENGINE_PRIVACY[embeddingEngine].name}
{EmbeddingEngine.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{EMBEDDING_ENGINE_PRIVACY[embeddingEngine].description.map(
(desc) => (
<li className="text-theme-text-primary text-sm">{desc}</li>
)
)}
{EmbeddingEngine.description.map((desc) => (
<li className="text-theme-text-primary text-sm">{desc}</li>
))}
</ul>
</div>
@ -477,16 +499,16 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
</div>
<div className="flex items-center gap-2.5">
<img
src={VECTOR_DB_PRIVACY[vectorDb].logo}
src={VectorDb.logo}
alt="LLM Logo"
className="w-8 h-8 rounded"
/>
<p className="text-theme-text-primary text-sm font-bold">
{VECTOR_DB_PRIVACY[vectorDb].name}
{VectorDb.name}
</p>
</div>
<ul className="flex flex-col list-disc ml-4">
{VECTOR_DB_PRIVACY[vectorDb].description.map((desc) => (
{VectorDb.description.map((desc) => (
<li className="text-theme-text-primary text-sm">{desc}</li>
))}
</ul>

View file

@ -32,7 +32,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import MistralOptions from "@/components/LLMSelection/MistralOptions";
@ -93,12 +92,12 @@ const LLMS = [
description: "Google's largest and most capable AI model",
},
{
name: "Nvidia NIM",
name: "NVIDIA NIM",
value: "nvidia-nim",
logo: NvidiaNimLogo,
options: (settings) => <NvidiaNimOptions settings={settings} />,
description:
"Run full parameter LLMs directly on your GPU using Nvidia's inference microservice via Docker.",
"Run full parameter LLMs directly on your NVIDIA RTX GPU using NVIDIA NIM.",
},
{
name: "HuggingFace",
@ -247,14 +246,6 @@ const LLMS = [
options: (settings) => <XAILLMOptions settings={settings} />,
description: "Run xAI's powerful LLMs like Grok-2 and more.",
},
{
name: "Native",
value: "native",
logo: AnythingLLMIcon,
options: (settings) => <NativeLLMOptions settings={settings} />,
description:
"Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
},
];
export default function LLMPreference({

View file

@ -4,11 +4,12 @@
import { createPortal } from "react-dom";
import ModalWrapper from "@/components/ModalWrapper";
import { useModal } from "@/hooks/useModal";
import { X } from "@phosphor-icons/react";
import { X, Gear } from "@phosphor-icons/react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useEffect, useState } from "react";
export default function WorkspaceLLM({
export default function AgentLLMItem({
llm,
availableLLMs,
settings,
@ -17,18 +18,31 @@ export default function WorkspaceLLM({
}) {
const { isOpen, openModal, closeModal } = useModal();
const { name, value, logo, description } = llm;
const [currentSettings, setCurrentSettings] = useState(settings);
useEffect(() => {
async function getSettings() {
if (isOpen) {
const _settings = await System.keys();
setCurrentSettings(_settings ?? {});
}
}
getSettings();
}, [isOpen]);
function handleProviderSelection() {
// Determine if provider needs additional setup because its minimum required keys are
// not yet set in settings.
const requiresAdditionalSetup = (llm.requiredConfig || []).some(
(key) => !settings[key]
);
if (requiresAdditionalSetup) {
openModal();
return;
if (!checked) {
const requiresAdditionalSetup = (llm.requiredConfig || []).some(
(key) => !currentSettings[key]
);
if (requiresAdditionalSetup) {
openModal();
return;
}
onClick(value);
}
onClick(value);
}
return (
@ -47,16 +61,30 @@ export default function WorkspaceLLM({
readOnly={true}
formNoValidate={true}
/>
<div className="flex gap-x-4 items-center">
<img
src={logo}
alt={`${name} logo`}
className="w-10 h-10 rounded-md"
/>
<div className="flex flex-col">
<div className="text-sm font-semibold text-white">{name}</div>
<div className="mt-1 text-xs text-white/60">{description}</div>
<div className="flex gap-x-4 items-center justify-between">
<div className="flex gap-x-4 items-center">
<img
src={logo}
alt={`${name} logo`}
className="w-10 h-10 rounded-md"
/>
<div className="flex flex-col">
<div className="text-sm font-semibold text-white">{name}</div>
<div className="mt-1 text-xs text-white/60">{description}</div>
</div>
</div>
{checked && value !== "none" && (
<button
onClick={(e) => {
e.preventDefault();
openModal();
}}
className="p-2 text-white/60 hover:text-white hover:bg-theme-bg-hover rounded-md transition-all duration-300"
title="Edit Settings"
>
<Gear size={20} weight="bold" />
</button>
)}
</div>
</div>
<SetupProvider
@ -65,6 +93,7 @@ export default function WorkspaceLLM({
provider={value}
closeModal={closeModal}
postSubmit={onClick}
settings={currentSettings}
/>
</>
);
@ -76,6 +105,7 @@ function SetupProvider({
provider,
closeModal,
postSubmit,
settings,
}) {
if (!isOpen) return null;
const LLMOption = availableLLMs.find((llm) => llm.value === provider);
@ -107,7 +137,7 @@ function SetupProvider({
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
<h3 className="text-xl font-semibold text-white overflow-hidden overflow-ellipsis whitespace-nowrap">
Setup {LLMOption.name}
{LLMOption.name} Settings
</h3>
</div>
<button
@ -120,12 +150,14 @@ function SetupProvider({
</div>
<form id="provider-form" onSubmit={handleUpdate}>
<div className="px-7 py-6">
<div className="space-y-6 max-h-[60vh] overflow-y-auto pr-2">
<div className="space-y-6 max-h-[60vh] overflow-y-auto p-1">
<p className="text-sm text-white/60">
To use {LLMOption.name} as this workspace's LLM you need to
set it up first.
To use {LLMOption.name} as this workspace's agent LLM you need
to set it up first.
</p>
<div>{LLMOption.options({ credentialsOnly: true })}</div>
<div>
{LLMOption.options(settings, { credentialsOnly: true })}
</div>
</div>
</div>
<div className="flex justify-between items-center mt-6 pt-6 border-t border-theme-modal-border px-7 pb-6">

View file

@ -16,7 +16,7 @@ export default function ChatModelSelection({
if (loading) {
return (
<div>
<div className="flex flex-col">
<div className="flex flex-col mt-6">
<label htmlFor="name" className="block input-label">
{t("chat.model.title")}
</label>
@ -40,7 +40,7 @@ export default function ChatModelSelection({
return (
<div>
<div className="flex flex-col">
<div className="flex flex-col mt-6">
<label htmlFor="name" className="block input-label">
{t("chat.model.title")}
</label>

View file

@ -4,9 +4,10 @@
import { createPortal } from "react-dom";
import ModalWrapper from "@/components/ModalWrapper";
import { useModal } from "@/hooks/useModal";
import { X } from "@phosphor-icons/react";
import { X, Gear } from "@phosphor-icons/react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useEffect, useState } from "react";
export default function WorkspaceLLM({
llm,
@ -17,18 +18,31 @@ export default function WorkspaceLLM({
}) {
const { isOpen, openModal, closeModal } = useModal();
const { name, value, logo, description } = llm;
const [currentSettings, setCurrentSettings] = useState(settings);
useEffect(() => {
async function getSettings() {
if (isOpen) {
const _settings = await System.keys();
setCurrentSettings(_settings ?? {});
}
}
getSettings();
}, [isOpen]);
function handleProviderSelection() {
// Determine if provider needs additional setup because its minimum required keys are
// not yet set in settings.
const requiresAdditionalSetup = (llm.requiredConfig || []).some(
(key) => !settings[key]
);
if (requiresAdditionalSetup) {
openModal();
return;
if (!checked) {
const requiresAdditionalSetup = (llm.requiredConfig || []).some(
(key) => !currentSettings[key]
);
if (requiresAdditionalSetup) {
openModal();
return;
}
onClick(value);
}
onClick(value);
}
return (
@ -47,16 +61,30 @@ export default function WorkspaceLLM({
readOnly={true}
formNoValidate={true}
/>
<div className="flex gap-x-4 items-center">
<img
src={logo}
alt={`${name} logo`}
className="w-10 h-10 rounded-md"
/>
<div className="flex flex-col">
<div className="text-sm font-semibold text-white">{name}</div>
<div className="mt-1 text-xs text-white/60">{description}</div>
<div className="flex gap-x-4 items-center justify-between">
<div className="flex gap-x-4 items-center">
<img
src={logo}
alt={`${name} logo`}
className="w-10 h-10 rounded-md"
/>
<div className="flex flex-col">
<div className="text-sm font-semibold text-white">{name}</div>
<div className="mt-1 text-xs text-white/60">{description}</div>
</div>
</div>
{checked && (
<button
onClick={(e) => {
e.preventDefault();
openModal();
}}
className="p-2 text-white/60 hover:text-white hover:bg-theme-bg-hover rounded-md transition-all duration-300"
title="Edit Settings"
>
<Gear size={20} weight="bold" />
</button>
)}
</div>
</div>
<SetupProvider
@ -65,6 +93,7 @@ export default function WorkspaceLLM({
provider={value}
closeModal={closeModal}
postSubmit={onClick}
settings={currentSettings}
/>
</>
);
@ -76,6 +105,7 @@ function SetupProvider({
provider,
closeModal,
postSubmit,
settings,
}) {
if (!isOpen) return null;
const LLMOption = availableLLMs.find((llm) => llm.value === provider);
@ -107,7 +137,7 @@ function SetupProvider({
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
<h3 className="text-xl font-semibold text-white overflow-hidden overflow-ellipsis whitespace-nowrap">
Setup {LLMOption.name}
{LLMOption.name} Settings
</h3>
</div>
<button
@ -120,12 +150,14 @@ function SetupProvider({
</div>
<form id="provider-form" onSubmit={handleUpdate}>
<div className="px-7 py-6">
<div className="space-y-6 max-h-[60vh] overflow-y-auto pr-2">
<div className="space-y-6 max-h-[60vh] overflow-y-auto p-1">
<p className="text-sm text-white/60">
To use {LLMOption.name} as this workspace's LLM you need to
set it up first.
</p>
<div>{LLMOption.options({ credentialsOnly: true })}</div>
<div>
{LLMOption.options(settings, { credentialsOnly: true })}
</div>
</div>
</div>
<div className="flex justify-between items-center mt-6 pt-6 border-t border-theme-modal-border px-7 pb-6">
@ -141,7 +173,7 @@ function SetupProvider({
form="provider-form"
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Save {LLMOption.name} settings
Save settings
</button>
</div>
</form>

View file

@ -18,7 +18,7 @@ const FREE_FORM_LLM_SELECTION = ["bedrock", "azure", "generic-openai"];
const NO_MODEL_SELECTION = ["default", "huggingface"];
// Some providers we just fully disable for ease of use.
const DISABLED_PROVIDERS = ["native"];
const DISABLED_PROVIDERS = [];
const LLM_DEFAULT = {
name: "System default",

View file

@ -1,6 +1,6 @@
import { encode as HTMLEncode } from "he";
import markdownIt from "markdown-it";
import markdownItKatex from "markdown-it-katex";
import markdownItKatexPlugin from "./plugins/markdown-katex";
import hljs from "highlight.js";
import "./themes/github-dark.css";
import "./themes/github.css";
@ -66,7 +66,7 @@ markdown.renderer.rules.image = function (tokens, idx) {
return `<div class="w-full max-w-[800px]"><img src="${src}" alt="${alt}" class="w-full h-auto" /></div>`;
};
markdown.use(markdownItKatex);
markdown.use(markdownItKatexPlugin);
export default function renderMarkdown(text = "") {
return markdown.render(text);

View file

@ -0,0 +1,277 @@
import katex from "katex";
// Test if potential opening or closing delimiter
// Assumes that there is a "$" at state.src[pos]
function isValidDelim(state, pos) {
var prevChar,
nextChar,
max = state.posMax,
can_open = true,
can_close = true;
prevChar = pos > 0 ? state.src.charCodeAt(pos - 1) : -1;
nextChar = pos + 1 <= max ? state.src.charCodeAt(pos + 1) : -1;
// Only apply whitespace rules if we're dealing with $ delimiter
if (state.src[pos] === "$") {
if (
prevChar === 0x20 /* " " */ ||
prevChar === 0x09 /* \t */ ||
(nextChar >= 0x30 /* "0" */ && nextChar <= 0x39) /* "9" */
) {
can_close = false;
}
if (nextChar === 0x20 /* " " */ || nextChar === 0x09 /* \t */) {
can_open = false;
}
}
return {
can_open: can_open,
can_close: can_close,
};
}
function math_inline(state, silent) {
var start, match, token, res, pos, esc_count;
// Only process $ and \( delimiters for inline math
if (
state.src[state.pos] !== "$" &&
(state.src[state.pos] !== "\\" || state.src[state.pos + 1] !== "(")
) {
return false;
}
// Handle \( ... \) case separately
if (state.src[state.pos] === "\\" && state.src[state.pos + 1] === "(") {
start = state.pos + 2;
match = start;
while ((match = state.src.indexOf("\\)", match)) !== -1) {
pos = match - 1;
while (state.src[pos] === "\\") {
pos -= 1;
}
if ((match - pos) % 2 == 1) {
break;
}
match += 1;
}
if (match === -1) {
if (!silent) {
state.pending += "\\(";
}
state.pos = start;
return true;
}
if (!silent) {
token = state.push("math_inline", "math", 0);
token.markup = "\\(";
token.content = state.src.slice(start, match);
}
state.pos = match + 2;
return true;
}
res = isValidDelim(state, state.pos);
if (!res.can_open) {
if (!silent) {
state.pending += "$";
}
state.pos += 1;
return true;
}
// First check for and bypass all properly escaped delimiters
// This loop will assume that the first leading backtick can not
// be the first character in state.src, which is known since
// we have found an opening delimiter already.
start = state.pos + 1;
match = start;
while ((match = state.src.indexOf("$", match)) !== -1) {
// Found potential $, look for escapes, pos will point to
// first non escape when complete
pos = match - 1;
while (state.src[pos] === "\\") {
pos -= 1;
}
// Even number of escapes, potential closing delimiter found
if ((match - pos) % 2 == 1) {
break;
}
match += 1;
}
// No closing delimiter found. Consume $ and continue.
if (match === -1) {
if (!silent) {
state.pending += "$";
}
state.pos = start;
return true;
}
// Check if we have empty content, i.e. $$. Do not parse.
if (match - start === 0) {
if (!silent) {
state.pending += "$$";
}
state.pos = start + 1;
return true;
}
// Check for valid closing delimiter
res = isValidDelim(state, match);
if (!res.can_close) {
if (!silent) {
state.pending += "$";
}
state.pos = start;
return true;
}
if (!silent) {
token = state.push("math_inline", "math", 0);
token.markup = "$";
token.content = state.src.slice(start, match);
}
state.pos = match + 1;
return true;
}
function math_block(state, start, end, silent) {
var firstLine,
lastLine,
next,
lastPos,
found = false,
token,
pos = state.bMarks[start] + state.tShift[start],
max = state.eMarks[start];
// Check for $$, \[, or standalone [ as opening delimiters
if (pos + 1 > max) {
return false;
}
let openDelim = state.src.slice(pos, pos + 2);
let isDoubleDollar = openDelim === "$$";
let isLatexBracket = openDelim === "\\[";
if (!isDoubleDollar && !isLatexBracket) {
return false;
}
// Determine the closing delimiter and position adjustment
let delimiter, posAdjust;
if (isDoubleDollar) {
delimiter = "$$";
posAdjust = 2;
} else if (isLatexBracket) {
delimiter = "\\]";
posAdjust = 2;
}
pos += posAdjust;
firstLine = state.src.slice(pos, max);
if (silent) {
return true;
}
if (firstLine.trim().slice(-delimiter.length) === delimiter) {
// Single line expression
firstLine = firstLine.trim().slice(0, -delimiter.length);
found = true;
}
for (next = start; !found; ) {
next++;
if (next >= end) {
break;
}
pos = state.bMarks[next] + state.tShift[next];
max = state.eMarks[next];
if (pos < max && state.tShift[next] < state.blkIndent) {
// non-empty line with negative indent should stop the list:
break;
}
if (
state.src.slice(pos, max).trim().slice(-delimiter.length) === delimiter
) {
lastPos = state.src.slice(0, max).lastIndexOf(delimiter);
lastLine = state.src.slice(pos, lastPos);
found = true;
}
}
state.line = next + 1;
token = state.push("math_block", "math", 0);
token.block = true;
token.content =
(firstLine && firstLine.trim() ? firstLine + "\n" : "") +
state.getLines(start + 1, next, state.tShift[start], true) +
(lastLine && lastLine.trim() ? lastLine : "");
token.map = [start, state.line];
token.markup = delimiter;
return true;
}
export default function math_plugin(md, options) {
// Default options
options = options || {};
var katexInline = function (latex) {
options.displayMode = false;
try {
latex = latex
.replace(/^\[(.*)\]$/, "$1")
.replace(/^\\\((.*)\\\)$/, "$1")
.replace(/^\\\[(.*)\\\]$/, "$1");
return katex.renderToString(latex, options);
} catch (error) {
if (options.throwOnError) {
console.log(error);
}
return latex;
}
};
var inlineRenderer = function (tokens, idx) {
return katexInline(tokens[idx].content);
};
var katexBlock = function (latex) {
options.displayMode = true;
try {
// Remove surrounding delimiters if present
latex = latex.replace(/^\[(.*)\]$/, "$1").replace(/^\\\[(.*)\\\]$/, "$1");
return "<p>" + katex.renderToString(latex, options) + "</p>";
} catch (error) {
if (options.throwOnError) {
console.log(error);
}
return latex;
}
};
var blockRenderer = function (tokens, idx) {
return katexBlock(tokens[idx].content) + "\n";
};
md.inline.ruler.after("escape", "math_inline", math_inline);
md.block.ruler.after("blockquote", "math_block", math_block, {
alt: ["paragraph", "reference", "blockquote", "list"],
});
md.renderer.rules.math_inline = inlineRenderer;
md.renderer.rules.math_block = blockRenderer;
}
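A hedged, standalone sketch of wiring this plugin into a fresh markdown-it instance; it assumes only what the code above shows, i.e. that the options object is forwarded to katex.renderToString:

import markdownIt from "markdown-it";
import mathPlugin from "./plugins/markdown-katex";

const md = markdownIt({ html: false }).use(mathPlugin, {
  throwOnError: false, // forwarded to katex.renderToString; also checked before logging errors
});

// The inline rule is registered after "escape" and the block rule after "blockquote",
// so $...$, \(...\), $$...$$ and \[...\] are all picked up without breaking lists or quotes.
md.render("Inline: $E = mc^2$");
md.render("\\[\nE = mc^2\n\\]");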

View file

@ -2483,13 +2483,6 @@ lru-cache@^5.1.1:
dependencies:
yallist "^3.0.2"
markdown-it-katex@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/markdown-it-katex/-/markdown-it-katex-2.0.3.tgz#d7b86a1aea0b9d6496fab4e7919a18fdef589c39"
integrity sha512-nUkkMtRWeg7OpdflamflE/Ho/pWl64Lk9wNBKOmaj33XkQdumhXAIYhI0WO03GeiycPCsxbmX536V5NEXpC3Ng==
dependencies:
katex "^0.6.0"
markdown-it@^13.0.1:
version "13.0.2"
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-13.0.2.tgz#1bc22e23379a6952e5d56217fbed881e0c94d536"

View file

@ -97,7 +97,7 @@ SIG_SALT='salt' # Please generate random string at least 32 chars long.
# LLM_PROVIDER='novita'
# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings#key-management
# NOVITA_LLM_MODEL_PREF='gryphe/mythomax-l2-13b'
# NOVITA_LLM_MODEL_PREF='deepseek/deepseek-r1'
# LLM_PROVIDER='cohere'
# COHERE_API_KEY=

View file

@ -374,6 +374,9 @@ function adminEndpoints(app) {
case "default_agent_skills":
requestedSettings[label] = safeJsonParse(setting?.value, []);
break;
case "disabled_agent_skills":
requestedSettings[label] = safeJsonParse(setting?.value, []);
break;
case "imported_agent_skills":
requestedSettings[label] = ImportedPlugin.listImportedPlugins();
break;
@ -440,6 +443,12 @@ function adminEndpoints(app) {
?.value,
[]
) || [],
disabled_agent_skills:
safeJsonParse(
(await SystemSettings.get({ label: "disabled_agent_skills" }))
?.value,
[]
) || [],
imported_agent_skills: ImportedPlugin.listImportedPlugins(),
custom_app_name:
(await SystemSettings.get({ label: "custom_app_name" }))?.value ||

View file

@ -24,6 +24,7 @@ const SystemSettings = {
"agent_search_provider",
"agent_sql_connections",
"default_agent_skills",
"disabled_agent_skills",
"imported_agent_skills",
"custom_app_name",
"feature_flags",
@ -40,6 +41,7 @@ const SystemSettings = {
"text_splitter_chunk_overlap",
"agent_search_provider",
"default_agent_skills",
"disabled_agent_skills",
"agent_sql_connections",
"custom_app_name",
@ -125,6 +127,15 @@ const SystemSettings = {
return JSON.stringify([]);
}
},
disabled_agent_skills: (updates) => {
try {
const skills = updates.split(",").filter((skill) => !!skill);
return JSON.stringify(skills);
} catch (e) {
console.error(`Could not validate disabled agent skills.`);
return JSON.stringify([]);
}
},
agent_sql_connections: async (updates) => {
const existingConnections = safeJsonParse(
(await SystemSettings.get({ label: "agent_sql_connections" }))?.value,
@ -490,10 +501,6 @@ const SystemSettings = {
GroqApiKey: !!process.env.GROQ_API_KEY,
GroqModelPref: process.env.GROQ_MODEL_PREF,
// Native LLM Keys
NativeLLMModelPref: process.env.NATIVE_LLM_MODEL_PREF,
NativeLLMTokenLimit: process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT,
// HuggingFace Dedicated Inference
HuggingFaceLLMEndpoint: process.env.HUGGING_FACE_LLM_ENDPOINT,
HuggingFaceLLMAccessToken: !!process.env.HUGGING_FACE_LLM_API_KEY,
@ -547,7 +554,7 @@ const SystemSettings = {
XAIApiKey: !!process.env.XAI_LLM_API_KEY,
XAIModelPref: process.env.XAI_LLM_MODEL_PREF,
// Nvidia NIM Keys
// NVIDIA NIM Keys
NvidiaNimLLMBasePath: process.env.NVIDIA_NIM_LLM_BASE_PATH,
NvidiaNimLLMModelPref: process.env.NVIDIA_NIM_LLM_MODEL_PREF,
NvidiaNimLLMTokenLimit: process.env.NVIDIA_NIM_LLM_MODEL_TOKEN_LIMIT,

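Conceptually the new validator mirrors default_agent_skills: the UI submits a comma-separated string and it is persisted as a JSON array. A small sketch with illustrative skill names (the real identifiers live in AgentPlugins and are not shown in this diff):

// Skill names below are placeholders, not confirmed identifiers.
const updates = "rag-memory,document-summarizer";
const stored = JSON.stringify(updates.split(",").filter((skill) => !!skill));
console.log(stored); // '["rag-memory","document-summarizer"]'

// Reading it back, as the admin endpoint hunk above does via safeJsonParse:
const parsed = JSON.parse(stored);
console.log(parsed.includes("document-summarizer")); // true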
View file

@ -62,7 +62,6 @@
"mssql": "^10.0.2",
"multer": "^1.4.5-lts.1",
"mysql2": "^3.9.8",
"node-llama-cpp": "^2.8.0",
"ollama": "^0.5.0",
"openai": "4.38.5",
"pg": "^8.11.5",

View file

@ -5,4 +5,5 @@ openrouter
apipie
novita
mixedbread-ai*
gemini
gemini
togetherAi

View file

@ -16,9 +16,13 @@ class AzureOpenAiLLM {
if (!process.env.AZURE_OPENAI_KEY)
throw new Error("No Azure API key was set.");
this.apiVersion = "2024-12-01-preview";
this.openai = new OpenAIClient(
process.env.AZURE_OPENAI_ENDPOINT,
new AzureKeyCredential(process.env.AZURE_OPENAI_KEY)
new AzureKeyCredential(process.env.AZURE_OPENAI_KEY),
{
apiVersion: this.apiVersion,
}
);
this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
this.limits = {
@ -29,6 +33,13 @@ class AzureOpenAiLLM {
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.#log(
`Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
);
}
#log(text, ...args) {
console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
}
#appendContext(contextTexts = []) {

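A minimal sketch of what the new constructor wiring and log line produce at startup; the endpoint, key and deployment name are placeholders, and the token limit depends on your env:

process.env.AZURE_OPENAI_ENDPOINT = "https://example.openai.azure.com";
process.env.AZURE_OPENAI_KEY = "azure-key-placeholder";
process.env.OPEN_MODEL_PREF = "my-gpt-4o-deployment";

const llm = new AzureOpenAiLLM(); // embedder/model args omitted; defaults assumed
// Console on init:
// [AzureOpenAi] Initialized. Model "my-gpt-4o-deployment" @ <token limit> tokens. API-Version: 2024-12-01-preview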
View file

@ -1,244 +0,0 @@
const fs = require("fs");
const path = require("path");
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const {
writeResponseChunk,
clientAbortedHandler,
} = require("../../helpers/chat/responses");
const {
LLMPerformanceMonitor,
} = require("../../helpers/chat/LLMPerformanceMonitor");
// Docs: https://js.langchain.com/docs/integrations/chat/llama_cpp
const ChatLlamaCpp = (...args) =>
import("@langchain/community/chat_models/llama_cpp").then(
({ ChatLlamaCpp }) => new ChatLlamaCpp(...args)
);
class NativeLLM {
constructor(embedder = null, modelPreference = null) {
if (!process.env.NATIVE_LLM_MODEL_PREF)
throw new Error("No local Llama model was set.");
this.model = modelPreference || process.env.NATIVE_LLM_MODEL_PREF || null;
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
user: this.promptWindowLimit() * 0.7,
};
this.embedder = embedder ?? new NativeEmbedder();
this.cacheDir = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "downloaded")
: path.resolve(__dirname, `../../../storage/models/downloaded`)
);
// Make directory when it does not exist in existing installations
if (!fs.existsSync(this.cacheDir)) fs.mkdirSync(this.cacheDir);
this.defaultTemp = 0.7;
}
async #initializeLlamaModel(temperature = 0.7) {
const modelPath = path.join(this.cacheDir, this.model);
if (!fs.existsSync(modelPath))
throw new Error(
`Local Llama model ${this.model} was not found in storage!`
);
global.llamaModelInstance = await ChatLlamaCpp({
modelPath,
temperature,
useMlock: true,
});
}
// If the model has been loaded once, it is in the memory now
// so we can skip re-loading it and instead go straight to inference.
// Note: this will break temperature setting hopping between workspaces with different temps.
async #llamaClient({ temperature = 0.7 }) {
if (global.llamaModelInstance) return global.llamaModelInstance;
await this.#initializeLlamaModel(temperature);
return global.llamaModelInstance;
}
#convertToLangchainPrototypes(chats = []) {
const {
HumanMessage,
SystemMessage,
AIMessage,
} = require("@langchain/core/messages");
const langchainChats = [];
const roleToMessageMap = {
system: SystemMessage,
user: HumanMessage,
assistant: AIMessage,
};
for (const chat of chats) {
if (!roleToMessageMap.hasOwnProperty(chat.role)) continue;
const MessageClass = roleToMessageMap[chat.role];
langchainChats.push(new MessageClass({ content: chat.content }));
}
return langchainChats;
}
#appendContext(contextTexts = []) {
if (!contextTexts || !contextTexts.length) return "";
return (
"\nContext:\n" +
contextTexts
.map((text, i) => {
return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
})
.join("")
);
}
streamingEnabled() {
return "streamGetChatCompletion" in this;
}
static promptWindowLimit(_modelName) {
const limit = process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT || 4096;
if (!limit || isNaN(Number(limit)))
throw new Error("No NativeAI token context limit was set.");
return Number(limit);
}
// Ensure the user set a value for the token limit
promptWindowLimit() {
const limit = process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT || 4096;
if (!limit || isNaN(Number(limit)))
throw new Error("No NativeAI token context limit was set.");
return Number(limit);
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
const model = await this.#llamaClient({ temperature });
const result = await LLMPerformanceMonitor.measureAsyncFunction(
model.call(messages)
);
if (!result.output?.content) return null;
const promptTokens = LLMPerformanceMonitor.countTokens(messages);
const completionTokens = LLMPerformanceMonitor.countTokens(
result.output.content
);
return {
textResponse: result.output.content,
metrics: {
prompt_tokens: promptTokens,
completion_tokens: completionTokens,
total_tokens: promptTokens + completionTokens,
outputTps: completionTokens / result.duration,
duration: result.duration,
},
};
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
const model = await this.#llamaClient({ temperature });
const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(
model.stream(messages),
messages
);
return measuredStreamRequest;
}
/**
* Handles the default stream response for a chat.
* @param {import("express").Response} response
* @param {import('../../helpers/chat/LLMPerformanceMonitor').MonitoredStream} stream
* @param {Object} responseProps
* @returns {Promise<string>}
*/
handleStream(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise(async (resolve) => {
let fullText = "";
// Establish listener to early-abort a streaming response
// in case things go sideways or the user does not like the response.
// We preserve the generated text but continue as if chat was completed
// to preserve previously generated content.
const handleAbort = () => {
stream?.endMeasurement({
completion_tokens: LLMPerformanceMonitor.countTokens(fullText),
});
clientAbortedHandler(resolve, fullText);
};
response.on("close", handleAbort);
for await (const chunk of stream) {
if (chunk === undefined)
throw new Error(
"Stream returned undefined chunk. Aborting reply - check model provider logs."
);
const content = chunk.hasOwnProperty("content") ? chunk.content : chunk;
fullText += content;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: content,
close: false,
error: false,
});
}
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
stream?.endMeasurement({
completion_tokens: LLMPerformanceMonitor.countTokens(fullText),
});
resolve(fullText);
});
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
async embedTextInput(textInput) {
return await this.embedder.embedTextInput(textInput);
}
async embedChunks(textChunks = []) {
return await this.embedder.embedChunks(textChunks);
}
async compressMessages(promptArgs = {}, rawHistory = []) {
const { messageArrayCompressor } = require("../../helpers/chat");
const messageArray = this.constructPrompt(promptArgs);
const compressedMessages = await messageArrayCompressor(
this,
messageArray,
rawHistory
);
return this.#convertToLangchainPrototypes(compressedMessages);
}
}
module.exports = {
NativeLLM,
};

View file

@ -35,7 +35,7 @@ class NovitaLLM {
this.model =
modelPreference ||
process.env.NOVITA_LLM_MODEL_PREF ||
"gryphe/mythomax-l2-13b";
"deepseek/deepseek-r1";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,

View file

@ -10,7 +10,7 @@ const {
class NvidiaNimLLM {
constructor(embedder = null, modelPreference = null) {
if (!process.env.NVIDIA_NIM_LLM_BASE_PATH)
throw new Error("No Nvidia NIM API Base Path was set.");
throw new Error("No NVIDIA NIM API Base Path was set.");
const { OpenAI: OpenAIApi } = require("openai");
this.nvidiaNim = new OpenAIApi({
@ -85,7 +85,7 @@ class NvidiaNimLLM {
static promptWindowLimit(_modelName) {
const limit = process.env.NVIDIA_NIM_LLM_MODEL_TOKEN_LIMIT || 4096;
if (!limit || isNaN(Number(limit)))
throw new Error("No Nvidia NIM token context limit was set.");
throw new Error("No NVIDIA NIM token context limit was set.");
return Number(limit);
}
@ -94,7 +94,7 @@ class NvidiaNimLLM {
promptWindowLimit() {
const limit = process.env.NVIDIA_NIM_LLM_MODEL_TOKEN_LIMIT || 4096;
if (!limit || isNaN(Number(limit)))
throw new Error("No Nvidia NIM token context limit was set.");
throw new Error("No NVIDIA NIM token context limit was set.");
return Number(limit);
}
@ -154,7 +154,7 @@ class NvidiaNimLLM {
async getChatCompletion(messages = null, { temperature = 0.7 }) {
if (!this.model)
throw new Error(
`Nvidia NIM chat: ${this.model} is not valid or defined model for chat completion!`
`NVIDIA NIM chat: ${this.model} is not valid or defined model for chat completion!`
);
const result = await LLMPerformanceMonitor.measureAsyncFunction(
@ -190,7 +190,7 @@ class NvidiaNimLLM {
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
if (!this.model)
throw new Error(
`Nvidia NIM chat: ${this.model} is not valid or defined model for chat completion!`
`NVIDIA NIM chat: ${this.model} is not valid or defined model for chat completion!`
);
const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(

View file

@ -1,39 +1,14 @@
const MODELS = {
"llama-3.1-sonar-small-128k-online": {
id: "llama-3.1-sonar-small-128k-online",
name: "llama-3.1-sonar-small-128k-online",
"sonar-pro": {
id: "sonar-pro",
name: "sonar-pro",
maxLength: 200000,
},
sonar: {
id: "sonar",
name: "sonar",
maxLength: 127072,
},
"llama-3.1-sonar-large-128k-online": {
id: "llama-3.1-sonar-large-128k-online",
name: "llama-3.1-sonar-large-128k-online",
maxLength: 127072,
},
"llama-3.1-sonar-huge-128k-online": {
id: "llama-3.1-sonar-huge-128k-online",
name: "llama-3.1-sonar-huge-128k-online",
maxLength: 127072,
},
"llama-3.1-sonar-small-128k-chat": {
id: "llama-3.1-sonar-small-128k-chat",
name: "llama-3.1-sonar-small-128k-chat",
maxLength: 131072,
},
"llama-3.1-sonar-large-128k-chat": {
id: "llama-3.1-sonar-large-128k-chat",
name: "llama-3.1-sonar-large-128k-chat",
maxLength: 131072,
},
"llama-3.1-8b-instruct": {
id: "llama-3.1-8b-instruct",
name: "llama-3.1-8b-instruct",
maxLength: 131072,
},
"llama-3.1-70b-instruct": {
id: "llama-3.1-70b-instruct",
name: "llama-3.1-70b-instruct",
maxLength: 131072,
},
};
module.exports.MODELS = MODELS;

View file

@ -1,9 +1,4 @@
| Model | Parameter Count | Context Length | Model Type |
| :---------------------------------- | :-------------- | :------------- | :-------------- |
| `llama-3.1-sonar-small-128k-online` | 8B | 127,072 | Chat Completion |
| `llama-3.1-sonar-large-128k-online` | 70B | 127,072 | Chat Completion |
| `llama-3.1-sonar-huge-128k-online` | 405B | 127,072 | Chat Completion |
| `llama-3.1-sonar-small-128k-chat` | 8B | 131,072 | Chat Completion |
| `llama-3.1-sonar-large-128k-chat` | 70B | 131,072 | Chat Completion |
| `llama-3.1-8b-instruct` | 8B | 131,072 | Chat Completion |
| `llama-3.1-70b-instruct` | 70B | 131,072 | Chat Completion |
| `sonar-pro` | 8B | 200,000 | Chat Completion |
| `sonar` | 8B | 127,072 | Chat Completion |

View file

@ -8,7 +8,12 @@
// copy outputs into the export in ../models.js
// Update the date below if you run this again because Perplexity added new models.
// Last Collected: Sept 12, 2024
// Last Collected: Jan 23, 2025
// UPDATE: Jan 23, 2025
// The table is no longer available on the website, but Perplexity has deprecated the
// old models, so we now update the chat_models.txt file with the new models
// manually and then run this script to regenerate the list.
import fs from "fs";

View file

@ -5,10 +5,76 @@ const {
const {
LLMPerformanceMonitor,
} = require("../../helpers/chat/LLMPerformanceMonitor");
const fs = require("fs");
const path = require("path");
const { safeJsonParse } = require("../../http");
function togetherAiModels() {
const { MODELS } = require("./models.js");
return MODELS || {};
const cacheFolder = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "togetherAi")
: path.resolve(__dirname, `../../../storage/models/togetherAi`)
);
async function togetherAiModels(apiKey = null) {
const cacheModelPath = path.resolve(cacheFolder, "models.json");
const cacheAtPath = path.resolve(cacheFolder, ".cached_at");
// If cache exists and is less than 1 week old, use it
if (fs.existsSync(cacheModelPath) && fs.existsSync(cacheAtPath)) {
const now = Number(new Date());
const timestampMs = Number(fs.readFileSync(cacheAtPath));
if (now - timestampMs <= 6.048e8) {
// 1 Week in MS
return safeJsonParse(
fs.readFileSync(cacheModelPath, { encoding: "utf-8" }),
[]
);
}
}
try {
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
baseURL: "https://api.together.xyz/v1",
apiKey: apiKey || process.env.TOGETHER_AI_API_KEY || null,
});
const response = await openai.models.list();
// Filter and transform models into the expected format
// Only include chat models
const validModels = response.body
.filter((model) => ["chat"].includes(model.type))
.map((model) => ({
id: model.id,
name: model.display_name || model.id,
organization: model.organization || "Unknown",
type: model.type,
maxLength: model.context_length || 4096,
}));
// Cache the results
if (!fs.existsSync(cacheFolder))
fs.mkdirSync(cacheFolder, { recursive: true });
fs.writeFileSync(cacheModelPath, JSON.stringify(validModels), {
encoding: "utf-8",
});
fs.writeFileSync(cacheAtPath, String(Number(new Date())), {
encoding: "utf-8",
});
return validModels;
} catch (error) {
console.error("Error fetching Together AI models:", error);
// If cache exists but is stale, still use it as fallback
if (fs.existsSync(cacheModelPath)) {
return safeJsonParse(
fs.readFileSync(cacheModelPath, { encoding: "utf-8" }),
[]
);
}
return [];
}
}
class TogetherAiLLM {
@ -60,29 +126,34 @@ class TogetherAiLLM {
return content.flat();
}
allModelInformation() {
return togetherAiModels();
async allModelInformation() {
const models = await togetherAiModels();
return models.reduce((acc, model) => {
acc[model.id] = model;
return acc;
}, {});
}
streamingEnabled() {
return "streamGetChatCompletion" in this;
}
static promptWindowLimit(modelName) {
const availableModels = togetherAiModels();
return availableModels[modelName]?.maxLength || 4096;
static async promptWindowLimit(modelName) {
const models = await togetherAiModels();
const model = models.find((m) => m.id === modelName);
return model?.maxLength || 4096;
}
// Ensure the user set a value for the token limit
// and if undefined - assume 4096 window.
promptWindowLimit() {
const availableModels = this.allModelInformation();
return availableModels[this.model]?.maxLength || 4096;
async promptWindowLimit() {
const models = await togetherAiModels();
const model = models.find((m) => m.id === this.model);
return model?.maxLength || 4096;
}
async isValidChatCompletionModel(model = "") {
const availableModels = this.allModelInformation();
return availableModels.hasOwnProperty(model);
const models = await togetherAiModels();
const foundModel = models.find((m) => m.id === model);
return foundModel && foundModel.type === "chat";
}
constructPrompt({

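A hedged usage sketch of the now-async helpers (model id illustrative; export name assumed from this file). The first call hits the Together API and writes models.json plus a .cached_at timestamp under the cacheFolder defined above; calls within the next week are served from that cache:

const { TogetherAiLLM } = require("./togetherAi"); // export name assumed

(async () => {
  // promptWindowLimit is now async because it may need to fetch and cache the model list.
  const limit = await TogetherAiLLM.promptWindowLimit(
    "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
  );
  console.log(limit); // falls back to 4096 when the model is unknown or the fetch fails
})();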
View file

@ -1,622 +0,0 @@
const MODELS = {
"zero-one-ai/Yi-34B-Chat": {
id: "zero-one-ai/Yi-34B-Chat",
organization: "01.AI",
name: "01-ai Yi Chat (34B)",
maxLength: 4096,
},
"allenai/OLMo-7B-Instruct": {
id: "allenai/OLMo-7B-Instruct",
organization: "AllenAI",
name: "OLMo Instruct (7B)",
maxLength: 2048,
},
"Austism/chronos-hermes-13b": {
id: "Austism/chronos-hermes-13b",
organization: "Austism",
name: "Chronos Hermes (13B)",
maxLength: 2048,
},
"carson/ml318br": {
id: "carson/ml318br",
organization: "carson",
name: "carson ml318br",
maxLength: 8192,
},
"cognitivecomputations/dolphin-2.5-mixtral-8x7b": {
id: "cognitivecomputations/dolphin-2.5-mixtral-8x7b",
organization: "cognitivecomputations",
name: "Dolphin 2.5 Mixtral 8x7b",
maxLength: 32768,
},
"databricks/dbrx-instruct": {
id: "databricks/dbrx-instruct",
organization: "Databricks",
name: "DBRX Instruct",
maxLength: 32768,
},
"deepseek-ai/deepseek-llm-67b-chat": {
id: "deepseek-ai/deepseek-llm-67b-chat",
organization: "DeepSeek",
name: "DeepSeek LLM Chat (67B)",
maxLength: 4096,
},
"deepseek-ai/deepseek-coder-33b-instruct": {
id: "deepseek-ai/deepseek-coder-33b-instruct",
organization: "DeepSeek",
name: "Deepseek Coder Instruct (33B)",
maxLength: 16384,
},
"garage-bAInd/Platypus2-70B-instruct": {
id: "garage-bAInd/Platypus2-70B-instruct",
organization: "garage-bAInd",
name: "Platypus2 Instruct (70B)",
maxLength: 4096,
},
"google/gemma-2-9b-it": {
id: "google/gemma-2-9b-it",
organization: "google",
name: "Gemma-2 Instruct (9B)",
maxLength: 8192,
},
"google/gemma-2b-it": {
id: "google/gemma-2b-it",
organization: "Google",
name: "Gemma Instruct (2B)",
maxLength: 8192,
},
"google/gemma-2-27b-it": {
id: "google/gemma-2-27b-it",
organization: "Google",
name: "Gemma-2 Instruct (27B)",
maxLength: 8192,
},
"google/gemma-7b-it": {
id: "google/gemma-7b-it",
organization: "Google",
name: "Gemma Instruct (7B)",
maxLength: 8192,
},
"gradientai/Llama-3-70B-Instruct-Gradient-1048k": {
id: "gradientai/Llama-3-70B-Instruct-Gradient-1048k",
organization: "gradientai",
name: "Llama-3 70B Instruct Gradient 1048K",
maxLength: 1048576,
},
"Gryphe/MythoMax-L2-13b": {
id: "Gryphe/MythoMax-L2-13b",
organization: "Gryphe",
name: "MythoMax-L2 (13B)",
maxLength: 4096,
},
"Gryphe/MythoMax-L2-13b-Lite": {
id: "Gryphe/MythoMax-L2-13b-Lite",
organization: "Gryphe",
name: "Gryphe MythoMax L2 Lite (13B)",
maxLength: 4096,
},
"llava-hf/llava-v1.6-mistral-7b-hf": {
id: "llava-hf/llava-v1.6-mistral-7b-hf",
organization: "Haotian Liu",
name: "LLaVa-Next (Mistral-7B)",
maxLength: 4096,
},
"HuggingFaceH4/zephyr-7b-beta": {
id: "HuggingFaceH4/zephyr-7b-beta",
organization: "HuggingFace",
name: "Zephyr-7B-ß",
maxLength: 32768,
},
"togethercomputer/Koala-7B": {
id: "togethercomputer/Koala-7B",
organization: "LM Sys",
name: "Koala (7B)",
maxLength: 2048,
},
"lmsys/vicuna-7b-v1.3": {
id: "lmsys/vicuna-7b-v1.3",
organization: "LM Sys",
name: "Vicuna v1.3 (7B)",
maxLength: 2048,
},
"lmsys/vicuna-13b-v1.5-16k": {
id: "lmsys/vicuna-13b-v1.5-16k",
organization: "LM Sys",
name: "Vicuna v1.5 16K (13B)",
maxLength: 16384,
},
"lmsys/vicuna-13b-v1.5": {
id: "lmsys/vicuna-13b-v1.5",
organization: "LM Sys",
name: "Vicuna v1.5 (13B)",
maxLength: 4096,
},
"lmsys/vicuna-13b-v1.3": {
id: "lmsys/vicuna-13b-v1.3",
organization: "LM Sys",
name: "Vicuna v1.3 (13B)",
maxLength: 2048,
},
"togethercomputer/Koala-13B": {
id: "togethercomputer/Koala-13B",
organization: "LM Sys",
name: "Koala (13B)",
maxLength: 2048,
},
"lmsys/vicuna-7b-v1.5": {
id: "lmsys/vicuna-7b-v1.5",
organization: "LM Sys",
name: "Vicuna v1.5 (7B)",
maxLength: 4096,
},
"codellama/CodeLlama-34b-Instruct-hf": {
id: "codellama/CodeLlama-34b-Instruct-hf",
organization: "Meta",
name: "Code Llama Instruct (34B)",
maxLength: 16384,
},
"togethercomputer/Llama-3-8b-chat-hf-int4": {
id: "togethercomputer/Llama-3-8b-chat-hf-int4",
organization: "Meta",
name: "Llama3 8B Chat HF INT4",
maxLength: 8192,
},
"meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo": {
id: "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
organization: "Meta",
name: "Llama 3.2 90B Vision Instruct Turbo",
maxLength: 131072,
},
"meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo": {
id: "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
organization: "Meta",
name: "Llama 3.2 11B Vision Instruct Turbo",
maxLength: 131072,
},
"meta-llama/Llama-3.2-3B-Instruct-Turbo": {
id: "meta-llama/Llama-3.2-3B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3.2 3B Instruct Turbo",
maxLength: 131072,
},
"togethercomputer/Llama-3-8b-chat-hf-int8": {
id: "togethercomputer/Llama-3-8b-chat-hf-int8",
organization: "Meta",
name: "Togethercomputer Llama3 8B Instruct Int8",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3.1 70B Instruct Turbo",
maxLength: 32768,
},
"meta-llama/Llama-2-13b-chat-hf": {
id: "meta-llama/Llama-2-13b-chat-hf",
organization: "Meta",
name: "LLaMA-2 Chat (13B)",
maxLength: 4096,
},
"meta-llama/Meta-Llama-3-70B-Instruct-Lite": {
id: "meta-llama/Meta-Llama-3-70B-Instruct-Lite",
organization: "Meta",
name: "Meta Llama 3 70B Instruct Lite",
maxLength: 8192,
},
"meta-llama/Llama-3-8b-chat-hf": {
id: "meta-llama/Llama-3-8b-chat-hf",
organization: "Meta",
name: "Meta Llama 3 8B Instruct Reference",
maxLength: 8192,
},
"meta-llama/Llama-3-70b-chat-hf": {
id: "meta-llama/Llama-3-70b-chat-hf",
organization: "Meta",
name: "Meta Llama 3 70B Instruct Reference",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3-8B-Instruct-Turbo": {
id: "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3 8B Instruct Turbo",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3-8B-Instruct-Lite": {
id: "meta-llama/Meta-Llama-3-8B-Instruct-Lite",
organization: "Meta",
name: "Meta Llama 3 8B Instruct Lite",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro": {
id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro",
organization: "Meta",
name: "Meta Llama 3.1 405B Instruct Turbo",
maxLength: 4096,
},
"meta-llama/Llama-2-7b-chat-hf": {
id: "meta-llama/Llama-2-7b-chat-hf",
organization: "Meta",
name: "LLaMA-2 Chat (7B)",
maxLength: 4096,
},
"meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3.1 405B Instruct Turbo",
maxLength: 130815,
},
"meta-llama/Llama-Vision-Free": {
id: "meta-llama/Llama-Vision-Free",
organization: "Meta",
name: "(Free) Llama 3.2 11B Vision Instruct Turbo",
maxLength: 131072,
},
"meta-llama/Meta-Llama-3-70B-Instruct-Turbo": {
id: "meta-llama/Meta-Llama-3-70B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3 70B Instruct Turbo",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
organization: "Meta",
name: "Meta Llama 3.1 8B Instruct Turbo",
maxLength: 32768,
},
"togethercomputer/CodeLlama-7b-Instruct": {
id: "togethercomputer/CodeLlama-7b-Instruct",
organization: "Meta",
name: "Code Llama Instruct (7B)",
maxLength: 16384,
},
"togethercomputer/CodeLlama-34b-Instruct": {
id: "togethercomputer/CodeLlama-34b-Instruct",
organization: "Meta",
name: "Code Llama Instruct (34B)",
maxLength: 16384,
},
"codellama/CodeLlama-13b-Instruct-hf": {
id: "codellama/CodeLlama-13b-Instruct-hf",
organization: "Meta",
name: "Code Llama Instruct (13B)",
maxLength: 16384,
},
"togethercomputer/CodeLlama-13b-Instruct": {
id: "togethercomputer/CodeLlama-13b-Instruct",
organization: "Meta",
name: "Code Llama Instruct (13B)",
maxLength: 16384,
},
"togethercomputer/llama-2-13b-chat": {
id: "togethercomputer/llama-2-13b-chat",
organization: "Meta",
name: "LLaMA-2 Chat (13B)",
maxLength: 4096,
},
"togethercomputer/llama-2-7b-chat": {
id: "togethercomputer/llama-2-7b-chat",
organization: "Meta",
name: "LLaMA-2 Chat (7B)",
maxLength: 4096,
},
"meta-llama/Meta-Llama-3-8B-Instruct": {
id: "meta-llama/Meta-Llama-3-8B-Instruct",
organization: "Meta",
name: "Meta Llama 3 8B Instruct",
maxLength: 8192,
},
"meta-llama/Meta-Llama-3-70B-Instruct": {
id: "meta-llama/Meta-Llama-3-70B-Instruct",
organization: "Meta",
name: "Meta Llama 3 70B Instruct",
maxLength: 8192,
},
"codellama/CodeLlama-70b-Instruct-hf": {
id: "codellama/CodeLlama-70b-Instruct-hf",
organization: "Meta",
name: "Code Llama Instruct (70B)",
maxLength: 4096,
},
"togethercomputer/llama-2-70b-chat": {
id: "togethercomputer/llama-2-70b-chat",
organization: "Meta",
name: "LLaMA-2 Chat (70B)",
maxLength: 4096,
},
"codellama/CodeLlama-7b-Instruct-hf": {
id: "codellama/CodeLlama-7b-Instruct-hf",
organization: "Meta",
name: "Code Llama Instruct (7B)",
maxLength: 16384,
},
"meta-llama/Llama-2-70b-chat-hf": {
id: "meta-llama/Llama-2-70b-chat-hf",
organization: "Meta",
name: "LLaMA-2 Chat (70B)",
maxLength: 4096,
},
"meta-llama/Meta-Llama-3.1-8B-Instruct-Reference": {
id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference",
organization: "Meta",
name: "Meta Llama 3.1 8B Instruct",
maxLength: 16384,
},
"albert/meta-llama-3-1-70b-instruct-turbo": {
id: "albert/meta-llama-3-1-70b-instruct-turbo",
organization: "Meta",
name: "Meta Llama 3.1 70B Instruct Turbo",
maxLength: 131072,
},
"meta-llama/Meta-Llama-3.1-70B-Instruct-Reference": {
id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Reference",
organization: "Meta",
name: "Meta Llama 3.1 70B Instruct",
maxLength: 8192,
},
"microsoft/WizardLM-2-8x22B": {
id: "microsoft/WizardLM-2-8x22B",
organization: "microsoft",
name: "WizardLM-2 (8x22B)",
maxLength: 65536,
},
"mistralai/Mistral-7B-Instruct-v0.1": {
id: "mistralai/Mistral-7B-Instruct-v0.1",
organization: "mistralai",
name: "Mistral (7B) Instruct",
maxLength: 4096,
},
"mistralai/Mistral-7B-Instruct-v0.2": {
id: "mistralai/Mistral-7B-Instruct-v0.2",
organization: "mistralai",
name: "Mistral (7B) Instruct v0.2",
maxLength: 32768,
},
"mistralai/Mistral-7B-Instruct-v0.3": {
id: "mistralai/Mistral-7B-Instruct-v0.3",
organization: "mistralai",
name: "Mistral (7B) Instruct v0.3",
maxLength: 32768,
},
"mistralai/Mixtral-8x7B-Instruct-v0.1": {
id: "mistralai/Mixtral-8x7B-Instruct-v0.1",
organization: "mistralai",
name: "Mixtral-8x7B Instruct v0.1",
maxLength: 32768,
},
"mistralai/Mixtral-8x22B-Instruct-v0.1": {
id: "mistralai/Mixtral-8x22B-Instruct-v0.1",
organization: "mistralai",
name: "Mixtral-8x22B Instruct v0.1",
maxLength: 65536,
},
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": {
id: "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
organization: "NousResearch",
name: "Nous Hermes 2 - Mixtral 8x7B-DPO",
maxLength: 32768,
},
"NousResearch/Nous-Hermes-Llama2-70b": {
id: "NousResearch/Nous-Hermes-Llama2-70b",
organization: "NousResearch",
name: "Nous Hermes LLaMA-2 (70B)",
maxLength: 4096,
},
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT": {
id: "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT",
organization: "NousResearch",
name: "Nous Hermes 2 - Mixtral 8x7B-SFT",
maxLength: 32768,
},
"NousResearch/Nous-Hermes-Llama2-13b": {
id: "NousResearch/Nous-Hermes-Llama2-13b",
organization: "NousResearch",
name: "Nous Hermes Llama-2 (13B)",
maxLength: 4096,
},
"NousResearch/Nous-Hermes-2-Mistral-7B-DPO": {
id: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
organization: "NousResearch",
name: "Nous Hermes 2 - Mistral DPO (7B)",
maxLength: 32768,
},
"NousResearch/Nous-Hermes-llama-2-7b": {
id: "NousResearch/Nous-Hermes-llama-2-7b",
organization: "NousResearch",
name: "Nous Hermes LLaMA-2 (7B)",
maxLength: 4096,
},
"NousResearch/Nous-Capybara-7B-V1p9": {
id: "NousResearch/Nous-Capybara-7B-V1p9",
organization: "NousResearch",
name: "Nous Capybara v1.9 (7B)",
maxLength: 8192,
},
"NousResearch/Hermes-2-Theta-Llama-3-70B": {
id: "NousResearch/Hermes-2-Theta-Llama-3-70B",
organization: "NousResearch",
name: "Hermes 2 Theta Llama-3 70B",
maxLength: 8192,
},
"openchat/openchat-3.5-1210": {
id: "openchat/openchat-3.5-1210",
organization: "OpenChat",
name: "OpenChat 3.5",
maxLength: 8192,
},
"Open-Orca/Mistral-7B-OpenOrca": {
id: "Open-Orca/Mistral-7B-OpenOrca",
organization: "OpenOrca",
name: "OpenOrca Mistral (7B) 8K",
maxLength: 8192,
},
"Qwen/Qwen2-72B-Instruct": {
id: "Qwen/Qwen2-72B-Instruct",
organization: "Qwen",
name: "Qwen 2 Instruct (72B)",
maxLength: 32768,
},
"Qwen/Qwen2.5-72B-Instruct-Turbo": {
id: "Qwen/Qwen2.5-72B-Instruct-Turbo",
organization: "Qwen",
name: "Qwen2.5 72B Instruct Turbo",
maxLength: 32768,
},
"Qwen/Qwen2.5-7B-Instruct-Turbo": {
id: "Qwen/Qwen2.5-7B-Instruct-Turbo",
organization: "Qwen",
name: "Qwen2.5 7B Instruct Turbo",
maxLength: 32768,
},
"Qwen/Qwen1.5-110B-Chat": {
id: "Qwen/Qwen1.5-110B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (110B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-72B-Chat": {
id: "Qwen/Qwen1.5-72B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (72B)",
maxLength: 32768,
},
"Qwen/Qwen2-1.5B-Instruct": {
id: "Qwen/Qwen2-1.5B-Instruct",
organization: "Qwen",
name: "Qwen 2 Instruct (1.5B)",
maxLength: 32768,
},
"Qwen/Qwen2-7B-Instruct": {
id: "Qwen/Qwen2-7B-Instruct",
organization: "Qwen",
name: "Qwen 2 Instruct (7B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-14B-Chat": {
id: "Qwen/Qwen1.5-14B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (14B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-1.8B-Chat": {
id: "Qwen/Qwen1.5-1.8B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (1.8B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-32B-Chat": {
id: "Qwen/Qwen1.5-32B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (32B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-7B-Chat": {
id: "Qwen/Qwen1.5-7B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (7B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-0.5B-Chat": {
id: "Qwen/Qwen1.5-0.5B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (0.5B)",
maxLength: 32768,
},
"Qwen/Qwen1.5-4B-Chat": {
id: "Qwen/Qwen1.5-4B-Chat",
organization: "Qwen",
name: "Qwen 1.5 Chat (4B)",
maxLength: 32768,
},
"snorkelai/Snorkel-Mistral-PairRM-DPO": {
id: "snorkelai/Snorkel-Mistral-PairRM-DPO",
organization: "Snorkel AI",
name: "Snorkel Mistral PairRM DPO (7B)",
maxLength: 32768,
},
"Snowflake/snowflake-arctic-instruct": {
id: "Snowflake/snowflake-arctic-instruct",
organization: "Snowflake",
name: "Snowflake Arctic Instruct",
maxLength: 4096,
},
"togethercomputer/alpaca-7b": {
id: "togethercomputer/alpaca-7b",
organization: "Stanford",
name: "Alpaca (7B)",
maxLength: 2048,
},
"teknium/OpenHermes-2-Mistral-7B": {
id: "teknium/OpenHermes-2-Mistral-7B",
organization: "teknium",
name: "OpenHermes-2-Mistral (7B)",
maxLength: 8192,
},
"teknium/OpenHermes-2p5-Mistral-7B": {
id: "teknium/OpenHermes-2p5-Mistral-7B",
organization: "teknium",
name: "OpenHermes-2.5-Mistral (7B)",
maxLength: 8192,
},
"test/test11": {
id: "test/test11",
organization: "test",
name: "Test 11",
maxLength: 4096,
},
"togethercomputer/guanaco-65b": {
id: "togethercomputer/guanaco-65b",
organization: "Tim Dettmers",
name: "Guanaco (65B)",
maxLength: 2048,
},
"togethercomputer/guanaco-13b": {
id: "togethercomputer/guanaco-13b",
organization: "Tim Dettmers",
name: "Guanaco (13B)",
maxLength: 2048,
},
"togethercomputer/guanaco-33b": {
id: "togethercomputer/guanaco-33b",
organization: "Tim Dettmers",
name: "Guanaco (33B)",
maxLength: 2048,
},
"togethercomputer/guanaco-7b": {
id: "togethercomputer/guanaco-7b",
organization: "Tim Dettmers",
name: "Guanaco (7B)",
maxLength: 2048,
},
"Undi95/ReMM-SLERP-L2-13B": {
id: "Undi95/ReMM-SLERP-L2-13B",
organization: "Undi95",
name: "ReMM SLERP L2 (13B)",
maxLength: 4096,
},
"Undi95/Toppy-M-7B": {
id: "Undi95/Toppy-M-7B",
organization: "Undi95",
name: "Toppy M (7B)",
maxLength: 4096,
},
"upstage/SOLAR-10.7B-Instruct-v1.0": {
id: "upstage/SOLAR-10.7B-Instruct-v1.0",
organization: "upstage",
name: "Upstage SOLAR Instruct v1 (11B)",
maxLength: 4096,
},
"togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4": {
id: "togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4",
organization: "upstage",
name: "Upstage SOLAR Instruct v1 (11B)-Int4",
maxLength: 4096,
},
"WizardLM/WizardLM-13B-V1.2": {
id: "WizardLM/WizardLM-13B-V1.2",
organization: "WizardLM",
name: "WizardLM v1.2 (13B)",
maxLength: 4096,
},
};
module.exports.MODELS = MODELS;

View file

@ -1 +0,0 @@
*.json

View file

@ -1,108 +0,0 @@
| Organization | Model Name | API Model String | Context length | Quantization |
| :-------------------- | :--------------------------------------- | :----------------------------------------------- | :------------- | :----------- |
| 01.AI | 01-ai Yi Chat (34B) | zero-one-ai/Yi-34B-Chat | 4096 | FP16 |
| AllenAI | OLMo Instruct (7B) | allenai/OLMo-7B-Instruct | 2048 | FP16 |
| Austism | Chronos Hermes (13B) | Austism/chronos-hermes-13b | 2048 | FP16 |
| carson | carson ml318br | carson/ml318br | 8192 | FP16 |
| cognitivecomputations | Dolphin 2.5 Mixtral 8x7b | cognitivecomputations/dolphin-2.5-mixtral-8x7b | 32768 | FP16 |
| Databricks | DBRX Instruct | databricks/dbrx-instruct | 32768 | FP16 |
| DeepSeek | DeepSeek LLM Chat (67B) | deepseek-ai/deepseek-llm-67b-chat | 4096 | FP16 |
| DeepSeek | Deepseek Coder Instruct (33B) | deepseek-ai/deepseek-coder-33b-instruct | 16384 | FP16 |
| garage-bAInd | Platypus2 Instruct (70B) | garage-bAInd/Platypus2-70B-instruct | 4096 | FP16 |
| google | Gemma-2 Instruct (9B) | google/gemma-2-9b-it | 8192 | FP16 |
| Google | Gemma Instruct (2B) | google/gemma-2b-it | 8192 | FP16 |
| Google | Gemma-2 Instruct (27B) | google/gemma-2-27b-it | 8192 | FP16 |
| Google | Gemma Instruct (7B) | google/gemma-7b-it | 8192 | FP16 |
| gradientai | Llama-3 70B Instruct Gradient 1048K | gradientai/Llama-3-70B-Instruct-Gradient-1048k | 1048576 | FP16 |
| Gryphe | MythoMax-L2 (13B) | Gryphe/MythoMax-L2-13b | 4096 | FP16 |
| Gryphe | Gryphe MythoMax L2 Lite (13B) | Gryphe/MythoMax-L2-13b-Lite | 4096 | FP16 |
| Haotian Liu | LLaVa-Next (Mistral-7B) | llava-hf/llava-v1.6-mistral-7b-hf | 4096 | FP16 |
| HuggingFace | Zephyr-7B-ß | HuggingFaceH4/zephyr-7b-beta | 32768 | FP16 |
| LM Sys | Koala (7B) | togethercomputer/Koala-7B | 2048 | FP16 |
| LM Sys | Vicuna v1.3 (7B) | lmsys/vicuna-7b-v1.3 | 2048 | FP16 |
| LM Sys | Vicuna v1.5 16K (13B) | lmsys/vicuna-13b-v1.5-16k | 16384 | FP16 |
| LM Sys | Vicuna v1.5 (13B) | lmsys/vicuna-13b-v1.5 | 4096 | FP16 |
| LM Sys | Vicuna v1.3 (13B) | lmsys/vicuna-13b-v1.3 | 2048 | FP16 |
| LM Sys | Koala (13B) | togethercomputer/Koala-13B | 2048 | FP16 |
| LM Sys | Vicuna v1.5 (7B) | lmsys/vicuna-7b-v1.5 | 4096 | FP16 |
| Meta | Code Llama Instruct (34B) | codellama/CodeLlama-34b-Instruct-hf | 16384 | FP16 |
| Meta | Llama3 8B Chat HF INT4 | togethercomputer/Llama-3-8b-chat-hf-int4 | 8192 | FP16 |
| Meta | Meta Llama 3.2 90B Vision Instruct Turbo | meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo | 131072 | FP16 |
| Meta | Meta Llama 3.2 11B Vision Instruct Turbo | meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo | 131072 | FP16 |
| Meta | Meta Llama 3.2 3B Instruct Turbo | meta-llama/Llama-3.2-3B-Instruct-Turbo | 131072 | FP16 |
| Meta | Togethercomputer Llama3 8B Instruct Int8 | togethercomputer/Llama-3-8b-chat-hf-int8 | 8192 | FP16 |
| Meta | Meta Llama 3.1 70B Instruct Turbo | meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo | 32768 | FP8 |
| Meta | LLaMA-2 Chat (13B) | meta-llama/Llama-2-13b-chat-hf | 4096 | FP16 |
| Meta | Meta Llama 3 70B Instruct Lite | meta-llama/Meta-Llama-3-70B-Instruct-Lite | 8192 | INT4 |
| Meta | Meta Llama 3 8B Instruct Reference | meta-llama/Llama-3-8b-chat-hf | 8192 | FP16 |
| Meta | Meta Llama 3 70B Instruct Reference | meta-llama/Llama-3-70b-chat-hf | 8192 | FP16 |
| Meta | Meta Llama 3 8B Instruct Turbo | meta-llama/Meta-Llama-3-8B-Instruct-Turbo | 8192 | FP8 |
| Meta | Meta Llama 3 8B Instruct Lite | meta-llama/Meta-Llama-3-8B-Instruct-Lite | 8192 | INT4 |
| Meta | Meta Llama 3.1 405B Instruct Turbo | meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro | 4096 | FP16 |
| Meta | LLaMA-2 Chat (7B) | meta-llama/Llama-2-7b-chat-hf | 4096 | FP16 |
| Meta | Meta Llama 3.1 405B Instruct Turbo | meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo | 130815 | FP8 |
| Meta | Meta Llama Vision Free | meta-llama/Llama-Vision-Free | 131072 | FP16 |
| Meta | Meta Llama 3 70B Instruct Turbo | meta-llama/Meta-Llama-3-70B-Instruct-Turbo | 8192 | FP8 |
| Meta | Meta Llama 3.1 8B Instruct Turbo | meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo | 32768 | FP8 |
| Meta | Code Llama Instruct (7B) | togethercomputer/CodeLlama-7b-Instruct | 16384 | FP16 |
| Meta | Code Llama Instruct (34B) | togethercomputer/CodeLlama-34b-Instruct | 16384 | FP16 |
| Meta | Code Llama Instruct (13B) | codellama/CodeLlama-13b-Instruct-hf | 16384 | FP16 |
| Meta | Code Llama Instruct (13B) | togethercomputer/CodeLlama-13b-Instruct | 16384 | FP16 |
| Meta | LLaMA-2 Chat (13B) | togethercomputer/llama-2-13b-chat | 4096 | FP16 |
| Meta | LLaMA-2 Chat (7B) | togethercomputer/llama-2-7b-chat | 4096 | FP16 |
| Meta | Meta Llama 3 8B Instruct | meta-llama/Meta-Llama-3-8B-Instruct | 8192 | FP16 |
| Meta | Meta Llama 3 70B Instruct | meta-llama/Meta-Llama-3-70B-Instruct | 8192 | FP16 |
| Meta | Code Llama Instruct (70B) | codellama/CodeLlama-70b-Instruct-hf | 4096 | FP16 |
| Meta | LLaMA-2 Chat (70B) | togethercomputer/llama-2-70b-chat | 4096 | FP16 |
| Meta | Code Llama Instruct (7B) | codellama/CodeLlama-7b-Instruct-hf | 16384 | FP16 |
| Meta | LLaMA-2 Chat (70B) | meta-llama/Llama-2-70b-chat-hf | 4096 | FP16 |
| Meta | Meta Llama 3.1 8B Instruct | meta-llama/Meta-Llama-3.1-8B-Instruct-Reference | 16384 | FP16 |
| Meta | Meta Llama 3.1 70B Instruct Turbo | albert/meta-llama-3-1-70b-instruct-turbo | 131072 | FP16 |
| Meta | Meta Llama 3.1 70B Instruct | meta-llama/Meta-Llama-3.1-70B-Instruct-Reference | 8192 | FP16 |
| Meta | (Free) Llama 3.2 11B Vision Instruct Turbo | meta-llama/Llama-Vision-Free | 131072 | FP16 |
| Meta | Llama 3.2 11B Vision Instruct Turbo | meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo | 131072 | FP16 |
| Meta | Llama 3.2 90B Vision Instruct Turbo | meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo | 131072 | FP16 |
| microsoft | WizardLM-2 (8x22B) | microsoft/WizardLM-2-8x22B | 65536 | FP16 |
| mistralai | Mistral (7B) Instruct | mistralai/Mistral-7B-Instruct-v0.1 | 4096 | FP16 |
| mistralai | Mistral (7B) Instruct v0.2 | mistralai/Mistral-7B-Instruct-v0.2 | 32768 | FP16 |
| mistralai | Mistral (7B) Instruct v0.3 | mistralai/Mistral-7B-Instruct-v0.3 | 32768 | FP16 |
| mistralai | Mixtral-8x7B Instruct v0.1 | mistralai/Mixtral-8x7B-Instruct-v0.1 | 32768 | FP16 |
| mistralai | Mixtral-8x22B Instruct v0.1 | mistralai/Mixtral-8x22B-Instruct-v0.1 | 65536 | FP16 |
| NousResearch | Nous Hermes 2 - Mixtral 8x7B-DPO | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO | 32768 | FP16 |
| NousResearch | Nous Hermes LLaMA-2 (70B) | NousResearch/Nous-Hermes-Llama2-70b | 4096 | FP16 |
| NousResearch | Nous Hermes 2 - Mixtral 8x7B-SFT | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT | 32768 | FP16 |
| NousResearch | Nous Hermes Llama-2 (13B) | NousResearch/Nous-Hermes-Llama2-13b | 4096 | FP16 |
| NousResearch | Nous Hermes 2 - Mistral DPO (7B) | NousResearch/Nous-Hermes-2-Mistral-7B-DPO | 32768 | FP16 |
| NousResearch | Nous Hermes LLaMA-2 (7B) | NousResearch/Nous-Hermes-llama-2-7b | 4096 | FP16 |
| NousResearch | Nous Capybara v1.9 (7B) | NousResearch/Nous-Capybara-7B-V1p9 | 8192 | FP16 |
| NousResearch | Hermes 2 Theta Llama-3 70B | NousResearch/Hermes-2-Theta-Llama-3-70B | 8192 | FP16 |
| OpenChat | OpenChat 3.5 | openchat/openchat-3.5-1210 | 8192 | FP16 |
| OpenOrca | OpenOrca Mistral (7B) 8K | Open-Orca/Mistral-7B-OpenOrca | 8192 | FP16 |
| Qwen | Qwen 2 Instruct (72B) | Qwen/Qwen2-72B-Instruct | 32768 | FP16 |
| Qwen | Qwen2.5 72B Instruct Turbo | Qwen/Qwen2.5-72B-Instruct-Turbo | 32768 | FP8 |
| Qwen | Qwen2.5 7B Instruct Turbo | Qwen/Qwen2.5-7B-Instruct-Turbo | 32768 | FP8 |
| Qwen | Qwen 1.5 Chat (110B) | Qwen/Qwen1.5-110B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (72B) | Qwen/Qwen1.5-72B-Chat | 32768 | FP16 |
| Qwen | Qwen 2 Instruct (1.5B) | Qwen/Qwen2-1.5B-Instruct | 32768 | FP16 |
| Qwen | Qwen 2 Instruct (7B) | Qwen/Qwen2-7B-Instruct | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (14B) | Qwen/Qwen1.5-14B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (1.8B) | Qwen/Qwen1.5-1.8B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (32B) | Qwen/Qwen1.5-32B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (7B) | Qwen/Qwen1.5-7B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (0.5B) | Qwen/Qwen1.5-0.5B-Chat | 32768 | FP16 |
| Qwen | Qwen 1.5 Chat (4B) | Qwen/Qwen1.5-4B-Chat | 32768 | FP16 |
| Snorkel AI | Snorkel Mistral PairRM DPO (7B) | snorkelai/Snorkel-Mistral-PairRM-DPO | 32768 | FP16 |
| Snowflake | Snowflake Arctic Instruct | Snowflake/snowflake-arctic-instruct | 4096 | FP16 |
| Stanford | Alpaca (7B) | togethercomputer/alpaca-7b | 2048 | FP16 |
| teknium | OpenHermes-2-Mistral (7B) | teknium/OpenHermes-2-Mistral-7B | 8192 | FP16 |
| teknium | OpenHermes-2.5-Mistral (7B) | teknium/OpenHermes-2p5-Mistral-7B | 8192 | FP16 |
| test | Test 11 | test/test11 | 4096 | FP16 |
| Tim Dettmers | Guanaco (65B) | togethercomputer/guanaco-65b | 2048 | FP16 |
| Tim Dettmers | Guanaco (13B) | togethercomputer/guanaco-13b | 2048 | FP16 |
| Tim Dettmers | Guanaco (33B) | togethercomputer/guanaco-33b | 2048 | FP16 |
| Tim Dettmers | Guanaco (7B) | togethercomputer/guanaco-7b | 2048 | FP16 |
| Undi95 | ReMM SLERP L2 (13B) | Undi95/ReMM-SLERP-L2-13B | 4096 | FP16 |
| Undi95 | Toppy M (7B) | Undi95/Toppy-M-7B | 4096 | FP16 |
| upstage | Upstage SOLAR Instruct v1 (11B) | upstage/SOLAR-10.7B-Instruct-v1.0 | 4096 | FP16 |
| upstage | Upstage SOLAR Instruct v1 (11B)-Int4 | togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4 | 4096 | FP16 |
| WizardLM | WizardLM v1.2 (13B) | WizardLM/WizardLM-13B-V1.2 | 4096 | FP16 |

View file

@ -1,43 +0,0 @@
// Together AI does not provide a simple REST API to get models,
// so we have a table which we copy from their documentation
// https://docs.together.ai/edit/inference-models that we can
// then parse and get all models from in a format that makes sense
// Why this does not exist is so bizarre, but whatever.
// To run, cd into this directory and run `node parse.mjs`
// copy outputs into the export in ../models.js
// Update the date below if you run this again because TogetherAI added new models.
// Last Collected: Nov 20, 2024
// Since last collection Together's docs are broken. I just copied the HTML table
// and had claude3 convert to markdown and it works well enough.
import fs from "fs";
function parseChatModels() {
const fixed = {};
const tableString = fs.readFileSync("chat_models.txt", { encoding: "utf-8" });
const rows = tableString.split("\n").slice(2);
rows.forEach((row) => {
const [provider, name, id, maxLength] = row.split("|").slice(1, -1);
const data = {
provider: provider.trim(),
name: name.trim(),
id: id.trim(),
maxLength: Number(maxLength.trim()),
};
fixed[data.id] = {
id: data.id,
organization: data.provider,
name: data.name,
maxLength: data.maxLength,
};
});
fs.writeFileSync("chat_models.json", JSON.stringify(fixed, null, 2), "utf-8");
return fixed;
}
parseChatModels();

View file

@ -10,7 +10,7 @@ class NovitaProvider extends InheritMultiple([Provider, UnTooled]) {
model;
constructor(config = {}) {
const { model = "gryphe/mythomax-l2-13b" } = config;
const { model = "deepseek/deepseek-r1" } = config;
super();
const client = new OpenAI({
baseURL: "https://api.novita.ai/v3/openai",

View file

@ -37,9 +37,9 @@ class NvidiaNimProvider extends InheritMultiple([Provider, UnTooled]) {
})
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("Nvidia NIM chat: No results!");
throw new Error("NVIDIA NIM chat: No results!");
if (result.choices.length === 0)
throw new Error("Nvidia NIM chat: No results length!");
throw new Error("NVIDIA NIM chat: No results length!");
return result.choices[0].message.content;
})
.catch((_) => {

View file

@ -4,6 +4,13 @@ const { safeJsonParse } = require("../http");
const Provider = require("./aibitat/providers/ai-provider");
const ImportedPlugin = require("./imported");
// This is a list of skills that are built-in and default enabled.
const DEFAULT_SKILLS = [
AgentPlugins.memory.name,
AgentPlugins.docSummarizer.name,
AgentPlugins.webScraping.name,
];
const USER_AGENT = {
name: "USER",
getDefinition: async () => {
@ -17,16 +24,9 @@ const USER_AGENT = {
const WORKSPACE_AGENT = {
name: "@agent",
getDefinition: async (provider = null) => {
const defaultFunctions = [
AgentPlugins.memory.name, // RAG
AgentPlugins.docSummarizer.name, // Doc Summary
AgentPlugins.webScraping.name, // Collector web-scraping
];
return {
role: Provider.systemPrompt(provider),
functions: [
...defaultFunctions,
...(await agentSkillsFromSystemSettings()),
...(await ImportedPlugin.activeImportedPlugins()),
],
@ -41,10 +41,29 @@ const WORKSPACE_AGENT = {
*/
async function agentSkillsFromSystemSettings() {
const systemFunctions = [];
const _setting = (await SystemSettings.get({ label: "default_agent_skills" }))
?.value;
safeJsonParse(_setting, []).forEach((skillName) => {
// Load non-imported built-in skills that are configurable, but are default enabled.
const _disabledDefaultSkills = safeJsonParse(
await SystemSettings.getValueOrFallback(
{ label: "disabled_agent_skills" },
"[]"
),
[]
);
DEFAULT_SKILLS.forEach((skill) => {
if (!_disabledDefaultSkills.includes(skill))
systemFunctions.push(AgentPlugins[skill].name);
});
// Load non-imported built-in skills that are configurable.
const _setting = safeJsonParse(
await SystemSettings.getValueOrFallback(
{ label: "default_agent_skills" },
"[]"
),
[]
);
_setting.forEach((skillName) => {
if (!AgentPlugins.hasOwnProperty(skillName)) return;
// This is a plugin module with many sub-children plugins who

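In effect, the agent's function list is now composed as: built-in defaults minus anything in disabled_agent_skills, plus whatever configurable skills are enabled, plus imported plugins. A sketch with illustrative skill names (not confirmed identifiers):

const DEFAULT_SKILLS = ["rag-memory", "document-summarizer", "web-scraping"]; // placeholders
const disabledDefaults = ["web-scraping"]; // from the disabled_agent_skills setting
const enabledConfigurable = ["sql-agent"]; // from the default_agent_skills setting

const functions = [
  ...DEFAULT_SKILLS.filter((skill) => !disabledDefaults.includes(skill)),
  ...enabledConfigurable,
];
console.log(functions); // => ["rag-memory", "document-summarizer", "sql-agent"]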
View file

@ -180,7 +180,7 @@ class AgentHandler {
case "nvidia-nim":
if (!process.env.NVIDIA_NIM_LLM_BASE_PATH)
throw new Error(
"Nvidia NIM base path must be provided to use agents."
"NVIDIA NIM base path must be provided to use agents."
);
break;
@ -245,7 +245,7 @@ class AgentHandler {
case "xai":
return process.env.XAI_LLM_MODEL_PREF ?? "grok-beta";
case "novita":
return process.env.NOVITA_LLM_MODEL_PREF ?? "gryphe/mythomax-l2-13b";
return process.env.NOVITA_LLM_MODEL_PREF ?? "deepseek/deepseek-r1";
case "nvidia-nim":
return process.env.NVIDIA_NIM_LLM_MODEL_PREF ?? null;
default:

View file

@ -13,7 +13,6 @@ const SUPPORT_CUSTOM_MODELS = [
"openai",
"localai",
"ollama",
"native-llm",
"togetherai",
"fireworksai",
"nvidia-nim",
@ -44,13 +43,11 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
case "ollama":
return await ollamaAIModels(basePath);
case "togetherai":
return await getTogetherAiModels();
return await getTogetherAiModels(apiKey);
case "fireworksai":
return await getFireworksAiModels(apiKey);
case "mistral":
return await getMistralModels(apiKey);
case "native-llm":
return nativeLLMModels();
case "perplexity":
return await getPerplexityModels();
case "openrouter":
@ -327,19 +324,21 @@ async function ollamaAIModels(basePath = null) {
return { models, error: null };
}
async function getTogetherAiModels() {
const knownModels = togetherAiModels();
if (!Object.keys(knownModels).length === 0)
return { models: [], error: null };
const models = Object.values(knownModels).map((model) => {
return {
id: model.id,
organization: model.organization,
name: model.name,
};
});
return { models, error: null };
async function getTogetherAiModels(apiKey = null) {
const _apiKey =
apiKey === true
? process.env.TOGETHER_AI_API_KEY
: apiKey || process.env.TOGETHER_AI_API_KEY || null;
try {
const { togetherAiModels } = require("../AiProviders/togetherAi");
const models = await togetherAiModels(_apiKey);
if (models.length > 0 && !!_apiKey)
process.env.TOGETHER_AI_API_KEY = _apiKey;
return { models, error: null };
} catch (error) {
console.error("Error in getTogetherAiModels:", error);
return { models: [], error: "Failed to fetch Together AI models" };
}
}
async function getFireworksAiModels() {
@ -444,26 +443,6 @@ async function getMistralModels(apiKey = null) {
return { models, error: null };
}
function nativeLLMModels() {
const fs = require("fs");
const path = require("path");
const storageDir = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "downloaded")
: path.resolve(__dirname, `../../storage/models/downloaded`)
);
if (!fs.existsSync(storageDir))
return { models: [], error: "No model/downloaded storage folder found." };
const files = fs
.readdirSync(storageDir)
.filter((file) => file.toLowerCase().includes(".gguf"))
.map((file) => {
return { id: file, name: file };
});
return { models: files, error: null };
}
async function getElevenLabsModels(apiKey = null) {
const models = (await ElevenLabsTTS.voices(apiKey)).map((model) => {
return {
@ -571,8 +550,8 @@ async function getNvidiaNimModels(basePath = null) {
return { models, error: null };
} catch (e) {
console.error(`Nvidia NIM:getNvidiaNimModels`, e.message);
return { models: [], error: "Could not fetch Nvidia NIM Models" };
console.error(`NVIDIA NIM:getNvidiaNimModels`, e.message);
return { models: [], error: "Could not fetch NVIDIA NIM Models" };
}
}

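A hedged sketch of the updated Together AI call path (helper path and export name assumed; the HTTP endpoint wiring is not part of this diff). Passing true as the key reuses the already-saved TOGETHER_AI_API_KEY, while a new string key is persisted to the env after a successful fetch:

const { getCustomModels } = require("../helpers/customModels"); // path assumed

(async () => {
  const { models, error } = await getCustomModels(
    "togetherai",
    "together-key-placeholder"
  );
  if (!error) console.log(models.slice(0, 3).map((m) => `${m.organization}/${m.name}`));
})();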
View file

@ -158,9 +158,6 @@ function getLLMProvider({ provider = null, model = null } = {}) {
case "mistral":
const { MistralLLM } = require("../AiProviders/mistral");
return new MistralLLM(embedder, model);
case "native":
const { NativeLLM } = require("../AiProviders/native");
return new NativeLLM(embedder, model);
case "huggingface":
const { HuggingFaceLLM } = require("../AiProviders/huggingface");
return new HuggingFaceLLM(embedder, model);
@ -302,9 +299,6 @@ function getLLMProviderClass({ provider = null } = {}) {
case "mistral":
const { MistralLLM } = require("../AiProviders/mistral");
return MistralLLM;
case "native":
const { NativeLLM } = require("../AiProviders/native");
return NativeLLM;
case "huggingface":
const { HuggingFaceLLM } = require("../AiProviders/huggingface");
return HuggingFaceLLM;

View file

@ -122,16 +122,6 @@ const KEY_MAPPING = {
checks: [isNotEmpty],
},
// Native LLM Settings
NativeLLMModelPref: {
envKey: "NATIVE_LLM_MODEL_PREF",
checks: [isDownloadedModel],
},
NativeLLMTokenLimit: {
envKey: "NATIVE_LLM_MODEL_TOKEN_LIMIT",
checks: [nonZero],
},
// Hugging Face LLM Inference Settings
HuggingFaceLLMEndpoint: {
envKey: "HUGGING_FACE_LLM_ENDPOINT",
@ -700,7 +690,6 @@ function supportedLLM(input = "") {
"lmstudio",
"localai",
"ollama",
"native",
"togetherai",
"fireworksai",
"mistral",
@ -815,22 +804,6 @@ function requiresForceMode(_, forceModeEnabled = false) {
return forceModeEnabled === true ? null : "Cannot set this setting.";
}
function isDownloadedModel(input = "") {
const fs = require("fs");
const path = require("path");
const storageDir = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "downloaded")
: path.resolve(__dirname, `../../storage/models/downloaded`)
);
if (!fs.existsSync(storageDir)) return false;
const files = fs
.readdirSync(storageDir)
.filter((file) => file.includes(".gguf"));
return files.includes(input);
}
async function validDockerizedUrl(input = "") {
if (process.env.ANYTHING_LLM_RUNTIME !== "docker") return null;
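
The removed KEY_MAPPING entries and the isDownloadedModel validator above follow the file's general pattern: each updatable setting names its env var plus a list of check functions, and a check reports a problem by returning an error string (as requiresForceMode does) or passes otherwise. Below is a minimal, self-contained sketch of that pattern; the check implementations and setting names are illustrative only, not copied from the diffed file.

// --- illustrative sketch, not part of the diff ---
// Illustrative checks: return null on success, an error string on failure.
const isNotEmpty = (input = "") =>
  input.length > 0 ? null : "Value cannot be empty.";
const nonZero = (input = "") =>
  Number(input) > 0 ? null : "Value must be greater than zero.";

// Hypothetical mapping in the same shape as the diffed KEY_MAPPING.
const KEY_MAPPING = {
  ExampleTokenLimit: { envKey: "EXAMPLE_TOKEN_LIMIT", checks: [nonZero] },
  ExampleBasePath: { envKey: "EXAMPLE_BASE_PATH", checks: [isNotEmpty] },
};

// Run every check for a setting; stop at the first failure, otherwise apply it.
function applySetting(key, value) {
  const mapping = KEY_MAPPING[key];
  if (!mapping) return `Unknown setting: ${key}`;
  for (const check of mapping.checks) {
    const error = check(value);
    if (error) return error;
  }
  process.env[mapping.envKey] = value;
  return null;
}

console.log(applySetting("ExampleTokenLimit", "0")); // "Value must be greater than zero."
console.log(applySetting("ExampleBasePath", "http://localhost:8080")); // null (applied)
// --- end sketch ---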

View file

@ -1083,18 +1083,6 @@
resolved "https://registry.yarnpkg.com/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz#9299f82874bab9e4c7f9c48d865becbfe8d6907c"
integrity sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==
"@kwsites/file-exists@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@kwsites/file-exists/-/file-exists-1.1.1.tgz#ad1efcac13e1987d8dbaf235ef3be5b0d96faa99"
integrity sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==
dependencies:
debug "^4.1.1"
"@kwsites/promise-deferred@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919"
integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==
"@ladjs/graceful@^3.2.2":
version "3.2.2"
resolved "https://registry.yarnpkg.com/@ladjs/graceful/-/graceful-3.2.2.tgz#1b141a9dc2604df99177d6714dbe4a0bff5e2ddf"
@ -1294,241 +1282,6 @@
"@nodelib/fs.scandir" "2.1.5"
fastq "^1.6.0"
"@octokit/app@^14.0.2":
version "14.1.0"
resolved "https://registry.yarnpkg.com/@octokit/app/-/app-14.1.0.tgz#2d491dc70746773b83f61edf5c56817dd7d3854b"
integrity sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw==
dependencies:
"@octokit/auth-app" "^6.0.0"
"@octokit/auth-unauthenticated" "^5.0.0"
"@octokit/core" "^5.0.0"
"@octokit/oauth-app" "^6.0.0"
"@octokit/plugin-paginate-rest" "^9.0.0"
"@octokit/types" "^12.0.0"
"@octokit/webhooks" "^12.0.4"
"@octokit/auth-app@^6.0.0":
version "6.1.1"
resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-6.1.1.tgz#758a5d2e0324c750f7463b10398fd99c52b2eb89"
integrity sha512-VrTtzRpyuT5nYGUWeGWQqH//hqEZDV+/yb6+w5wmWpmmUA1Tx950XsAc2mBBfvusfcdF2E7w8jZ1r1WwvfZ9pA==
dependencies:
"@octokit/auth-oauth-app" "^7.1.0"
"@octokit/auth-oauth-user" "^4.1.0"
"@octokit/request" "^8.3.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.1.0"
deprecation "^2.3.1"
lru-cache "^10.0.0"
universal-github-app-jwt "^1.1.2"
universal-user-agent "^6.0.0"
"@octokit/auth-oauth-app@^7.0.0", "@octokit/auth-oauth-app@^7.1.0":
version "7.1.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-app/-/auth-oauth-app-7.1.0.tgz#d0f74e19ebd5a4829cb780c107cedd6c894f20fc"
integrity sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA==
dependencies:
"@octokit/auth-oauth-device" "^6.1.0"
"@octokit/auth-oauth-user" "^4.1.0"
"@octokit/request" "^8.3.1"
"@octokit/types" "^13.0.0"
"@types/btoa-lite" "^1.0.0"
btoa-lite "^1.0.0"
universal-user-agent "^6.0.0"
"@octokit/auth-oauth-device@^6.1.0":
version "6.1.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-device/-/auth-oauth-device-6.1.0.tgz#f868213a3db05fe27e68d1fc607502a322379dd9"
integrity sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw==
dependencies:
"@octokit/oauth-methods" "^4.1.0"
"@octokit/request" "^8.3.1"
"@octokit/types" "^13.0.0"
universal-user-agent "^6.0.0"
"@octokit/auth-oauth-user@^4.0.0", "@octokit/auth-oauth-user@^4.1.0":
version "4.1.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-user/-/auth-oauth-user-4.1.0.tgz#32e5529f8bd961af9839a1f8c6ab0c8ad2184eee"
integrity sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ==
dependencies:
"@octokit/auth-oauth-device" "^6.1.0"
"@octokit/oauth-methods" "^4.1.0"
"@octokit/request" "^8.3.1"
"@octokit/types" "^13.0.0"
btoa-lite "^1.0.0"
universal-user-agent "^6.0.0"
"@octokit/auth-token@^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7"
integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==
"@octokit/auth-unauthenticated@^5.0.0":
version "5.0.1"
resolved "https://registry.yarnpkg.com/@octokit/auth-unauthenticated/-/auth-unauthenticated-5.0.1.tgz#d8032211728333068b2e07b53997c29e59a03507"
integrity sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg==
dependencies:
"@octokit/request-error" "^5.0.0"
"@octokit/types" "^12.0.0"
"@octokit/core@^5.0.0":
version "5.2.0"
resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea"
integrity sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==
dependencies:
"@octokit/auth-token" "^4.0.0"
"@octokit/graphql" "^7.1.0"
"@octokit/request" "^8.3.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.0.0"
before-after-hook "^2.2.0"
universal-user-agent "^6.0.0"
"@octokit/endpoint@^9.0.1":
version "9.0.5"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.5.tgz#e6c0ee684e307614c02fc6ac12274c50da465c44"
integrity sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==
dependencies:
"@octokit/types" "^13.1.0"
universal-user-agent "^6.0.0"
"@octokit/graphql@^7.1.0":
version "7.1.0"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.0.tgz#9bc1c5de92f026648131f04101cab949eeffe4e0"
integrity sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==
dependencies:
"@octokit/request" "^8.3.0"
"@octokit/types" "^13.0.0"
universal-user-agent "^6.0.0"
"@octokit/oauth-app@^6.0.0":
version "6.1.0"
resolved "https://registry.yarnpkg.com/@octokit/oauth-app/-/oauth-app-6.1.0.tgz#22c276f6ad2364c6999837bfdd5d9c1092838726"
integrity sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g==
dependencies:
"@octokit/auth-oauth-app" "^7.0.0"
"@octokit/auth-oauth-user" "^4.0.0"
"@octokit/auth-unauthenticated" "^5.0.0"
"@octokit/core" "^5.0.0"
"@octokit/oauth-authorization-url" "^6.0.2"
"@octokit/oauth-methods" "^4.0.0"
"@types/aws-lambda" "^8.10.83"
universal-user-agent "^6.0.0"
"@octokit/oauth-authorization-url@^6.0.2":
version "6.0.2"
resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-6.0.2.tgz#cc82ca29cc5e339c9921672f39f2b3f5c8eb6ef2"
integrity sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA==
"@octokit/oauth-methods@^4.0.0", "@octokit/oauth-methods@^4.1.0":
version "4.1.0"
resolved "https://registry.yarnpkg.com/@octokit/oauth-methods/-/oauth-methods-4.1.0.tgz#1403ac9c4d4e277922fddc4c89fa8a782f8f791b"
integrity sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw==
dependencies:
"@octokit/oauth-authorization-url" "^6.0.2"
"@octokit/request" "^8.3.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.0.0"
btoa-lite "^1.0.0"
"@octokit/openapi-types@^20.0.0":
version "20.0.0"
resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5"
integrity sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==
"@octokit/openapi-types@^22.2.0":
version "22.2.0"
resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-22.2.0.tgz#75aa7dcd440821d99def6a60b5f014207ae4968e"
integrity sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==
"@octokit/plugin-paginate-graphql@^4.0.0":
version "4.0.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz#9c0b1145b93a2b8635943f497c127969d54512fc"
integrity sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==
"@octokit/plugin-paginate-rest@^9.0.0":
version "9.2.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz#2e2a2f0f52c9a4b1da1a3aa17dabe3c459b9e401"
integrity sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==
dependencies:
"@octokit/types" "^12.6.0"
"@octokit/plugin-rest-endpoint-methods@^10.0.0":
version "10.4.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17"
integrity sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==
dependencies:
"@octokit/types" "^12.6.0"
"@octokit/plugin-retry@^6.0.0":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz#3257404f7cc418e1c1f13a7f2012c1db848b7693"
integrity sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==
dependencies:
"@octokit/request-error" "^5.0.0"
"@octokit/types" "^12.0.0"
bottleneck "^2.15.3"
"@octokit/plugin-throttling@^8.0.0":
version "8.2.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-8.2.0.tgz#9ec3ea2e37b92fac63f06911d0c8141b46dc4941"
integrity sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ==
dependencies:
"@octokit/types" "^12.2.0"
bottleneck "^2.15.3"
"@octokit/request-error@^5.0.0", "@octokit/request-error@^5.1.0":
version "5.1.0"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30"
integrity sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==
dependencies:
"@octokit/types" "^13.1.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request@^8.3.0", "@octokit/request@^8.3.1":
version "8.4.0"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-8.4.0.tgz#7f4b7b1daa3d1f48c0977ad8fffa2c18adef8974"
integrity sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==
dependencies:
"@octokit/endpoint" "^9.0.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.1.0"
universal-user-agent "^6.0.0"
"@octokit/types@^12.0.0", "@octokit/types@^12.2.0", "@octokit/types@^12.6.0":
version "12.6.0"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2"
integrity sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==
dependencies:
"@octokit/openapi-types" "^20.0.0"
"@octokit/types@^13.0.0", "@octokit/types@^13.1.0":
version "13.5.0"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.5.0.tgz#4796e56b7b267ebc7c921dcec262b3d5bfb18883"
integrity sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==
dependencies:
"@octokit/openapi-types" "^22.2.0"
"@octokit/webhooks-methods@^4.1.0":
version "4.1.0"
resolved "https://registry.yarnpkg.com/@octokit/webhooks-methods/-/webhooks-methods-4.1.0.tgz#681a6c86c9b21d4ec9e29108fb053ae7512be033"
integrity sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ==
"@octokit/webhooks-types@7.4.0":
version "7.4.0"
resolved "https://registry.yarnpkg.com/@octokit/webhooks-types/-/webhooks-types-7.4.0.tgz#7ed15c75908683a34e0079c80f261fe568b87395"
integrity sha512-FE2V+QZ2UYlh+9wWd5BPLNXG+J/XUD/PPq0ovS+nCcGX4+3qVbi3jYOmCTW48hg9SBBLtInx9+o7fFt4H5iP0Q==
"@octokit/webhooks@^12.0.4":
version "12.2.0"
resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-12.2.0.tgz#ea1ee2d9d9c5a4b7b53ff8bc64a9feb0dac94161"
integrity sha512-CyuLJ0/P7bKZ+kIYw+fnkeVdhUzNuDKgNSI7pU/m7Nod0T7kP+s4s2f0pNmG9HL8/RZN1S0ZWTDll3VTMrFLAw==
dependencies:
"@octokit/request-error" "^5.0.0"
"@octokit/webhooks-methods" "^4.1.0"
"@octokit/webhooks-types" "7.4.0"
aggregate-error "^3.1.0"
"@petamoriken/float16@^3.8.6":
version "3.8.6"
resolved "https://registry.yarnpkg.com/@petamoriken/float16/-/float16-3.8.6.tgz#580701cb97a510882342333d31c7cbfd9e14b4f4"
@ -2107,16 +1860,6 @@
resolved "https://registry.yarnpkg.com/@tediousjs/connection-string/-/connection-string-0.5.0.tgz#9b3d858c040aac6bdf5584bf45370cef5b6522b4"
integrity sha512-7qSgZbincDDDFyRweCIEvZULFAw5iz/DeunhvuxpL31nfntX3P4Yd4HkHBRg9H8CdqY1e5WFN1PZIz/REL9MVQ==
"@types/aws-lambda@^8.10.83":
version "8.10.137"
resolved "https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.137.tgz#c9998a944541afdd6df0d159e9ec9c23dfe5fb40"
integrity sha512-YNFwzVarXAOXkjuFxONyDw1vgRNzyH8AuyN19s0bM+ChSu/bzxb5XPxYFLXoqoM+tvgzwR3k7fXcEOW125yJxg==
"@types/btoa-lite@^1.0.0":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@types/btoa-lite/-/btoa-lite-1.0.2.tgz#82bb6aab00abf7cff3ca2825abe010c0cd536ae5"
integrity sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg==
"@types/command-line-args@^5.2.1":
version "5.2.3"
resolved "https://registry.yarnpkg.com/@types/command-line-args/-/command-line-args-5.2.3.tgz#553ce2fd5acf160b448d307649b38ffc60d39639"
@ -2127,13 +1870,6 @@
resolved "https://registry.yarnpkg.com/@types/command-line-usage/-/command-line-usage-5.0.4.tgz#374e4c62d78fbc5a670a0f36da10235af879a0d5"
integrity sha512-BwR5KP3Es/CSht0xqBcUXS3qCAUVXwpRKsV2+arxeb65atasuXG9LykC9Ab10Cw3s2raH92ZqOeILaQbsB2ACg==
"@types/jsonwebtoken@^9.0.0":
version "9.0.6"
resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-9.0.6.tgz#d1af3544d99ad992fb6681bbe60676e06b032bd3"
integrity sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw==
dependencies:
"@types/node" "*"
"@types/lodash@^4.14.165":
version "4.17.6"
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.6.tgz#193ced6a40c8006cfc1ca3f4553444fb38f0e543"
@ -2299,14 +2035,6 @@ agentkeepalive@^4.2.1:
dependencies:
humanize-ms "^1.2.1"
aggregate-error@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a"
integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==
dependencies:
clean-stack "^2.0.0"
indent-string "^4.0.0"
ajv@^6.12.4:
version "6.12.6"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
@ -2339,11 +2067,6 @@ ansi-regex@^5.0.1:
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
ansi-regex@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a"
integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==
ansi-styles@^4.0.0, ansi-styles@^4.1.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
@ -2397,14 +2120,6 @@ are-we-there-yet@^2.0.0:
delegates "^1.0.0"
readable-stream "^3.6.0"
are-we-there-yet@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz#679df222b278c64f2cdba1175cdc00b0d96164bd"
integrity sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==
dependencies:
delegates "^1.0.0"
readable-stream "^3.6.0"
argparse@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
@ -2534,7 +2249,7 @@ aws-ssl-profiles@^1.1.1:
resolved "https://registry.yarnpkg.com/aws-ssl-profiles/-/aws-ssl-profiles-1.1.1.tgz#21ef8ad77d753927f6c01b144c5ef4cc4f150cdc"
integrity sha512-+H+kuK34PfMaI9PNU/NSjBKL5hh/KDM9J72kwYeYEm0A8B1AC4fuCy3qsjnA7lxklgyXsB68yn8Z2xoZEjgwCQ==
axios@^1.4.0, axios@^1.6.2, axios@^1.6.5:
axios@^1.4.0, axios@^1.6.2:
version "1.6.8"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.8.tgz#66d294951f5d988a00e87a0ffb955316a619ea66"
integrity sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==
@ -2604,11 +2319,6 @@ bcrypt@^5.1.0:
"@mapbox/node-pre-gyp" "^1.0.11"
node-addon-api "^5.0.0"
before-after-hook@^2.2.0:
version "2.2.3"
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c"
integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==
binary-extensions@^2.0.0, binary-extensions@^2.2.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522"
@ -2628,15 +2338,6 @@ bl@^4.0.3:
inherits "^2.0.4"
readable-stream "^3.4.0"
bl@^5.0.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-5.1.0.tgz#183715f678c7188ecef9fe475d90209400624273"
integrity sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==
dependencies:
buffer "^6.0.3"
inherits "^2.0.4"
readable-stream "^3.4.0"
bl@^6.0.3:
version "6.0.12"
resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
@ -2675,11 +2376,6 @@ boolean@^3.2.0:
resolved "https://registry.yarnpkg.com/boolean/-/boolean-3.2.0.tgz#9e5294af4e98314494cbb17979fa54ca159f116b"
integrity sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==
bottleneck@^2.15.3:
version "2.19.5"
resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91"
integrity sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==
bowser@^2.11.0:
version "2.11.0"
resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f"
@ -2705,11 +2401,6 @@ bson@^6.2.0:
resolved "https://registry.yarnpkg.com/bson/-/bson-6.6.0.tgz#f225137eb49fe19bee4d87949a0515c05176e2ad"
integrity sha512-BVINv2SgcMjL4oYbBuCQTpE3/VKOSxrOA8Cj/wQP7izSzlBGVomdm+TcUd0Pzy0ytLSSDweCKQ6X3f5veM5LQA==
btoa-lite@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/btoa-lite/-/btoa-lite-1.0.0.tgz#337766da15801210fdd956c22e9c6891ab9d0337"
integrity sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA==
buffer-equal-constant-time@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
@ -2784,11 +2475,6 @@ chalk@^4, chalk@^4.0.0, chalk@^4.1.2:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chalk@^5.0.0, chalk@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==
chardet@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
@ -2804,11 +2490,6 @@ check-disk-space@^3.4.0:
resolved "https://registry.yarnpkg.com/check-disk-space/-/check-disk-space-3.4.0.tgz#eb8e69eee7a378fd12e35281b8123a8b4c4a8ff7"
integrity sha512-drVkSqfwA+TvuEhFipiR1OC9boEGZL5RrWvVsOthdcvQNXyCCuKkEiTOTXZ7qxSf/GLwq4GvzfrQD/Wz325hgw==
chmodrp@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/chmodrp/-/chmodrp-1.0.2.tgz#d1c0075dfcc97fe7f5f924252438a84bd5d26a9a"
integrity sha512-TdngOlFV1FLTzU0o1w8MB6/BFywhtLC0SzRTGJU7T9lmdjlCWeMRt1iVo0Ki+ldwNk0BqNiKoc8xpLZEQ8mY1w==
chokidar@^3.5.2:
version "3.6.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b"
@ -2842,26 +2523,7 @@ chromadb@^1.5.2:
cliui "^8.0.1"
isomorphic-fetch "^3.0.0"
clean-stack@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
cli-cursor@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-4.0.0.tgz#3cecfe3734bf4fe02a8361cbdc0f6fe28c6a57ea"
integrity sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==
dependencies:
restore-cursor "^4.0.0"
cli-progress@^3.12.0:
version "3.12.0"
resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.12.0.tgz#807ee14b66bcc086258e444ad0f19e7d42577942"
integrity sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==
dependencies:
string-width "^4.2.3"
cli-spinners@^2.9.0, cli-spinners@^2.9.2:
cli-spinners@^2.9.2:
version "2.9.2"
resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41"
integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==
@ -2880,25 +2542,6 @@ cliui@^8.0.1:
strip-ansi "^6.0.1"
wrap-ansi "^7.0.0"
cmake-js@^7.2.1:
version "7.3.0"
resolved "https://registry.yarnpkg.com/cmake-js/-/cmake-js-7.3.0.tgz#6fd6234b7aeec4545c1c806f9e3f7ffacd9798b2"
integrity sha512-dXs2zq9WxrV87bpJ+WbnGKv8WUBXDw8blNiwNHoRe/it+ptscxhQHKB1SJXa1w+kocLMeP28Tk4/eTCezg4o+w==
dependencies:
axios "^1.6.5"
debug "^4"
fs-extra "^11.2.0"
lodash.isplainobject "^4.0.6"
memory-stream "^1.0.0"
node-api-headers "^1.1.0"
npmlog "^6.0.2"
rc "^1.2.7"
semver "^7.5.4"
tar "^6.2.0"
url-join "^4.0.1"
which "^2.0.2"
yargs "^17.7.2"
cohere-ai@^7.9.5:
version "7.9.5"
resolved "https://registry.yarnpkg.com/cohere-ai/-/cohere-ai-7.9.5.tgz#05a592fe19decb8692d1b19d93ac835d7f816b8b"
@ -2942,7 +2585,7 @@ color-string@^1.6.0, color-string@^1.9.0:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
color-support@^1.1.2, color-support@^1.1.3:
color-support@^1.1.2:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2"
integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==
@ -3093,13 +2736,6 @@ cron-validate@^1.4.5:
dependencies:
yup "0.32.9"
cross-env@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
integrity sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==
dependencies:
cross-spawn "^7.0.1"
cross-fetch@^3.1.5:
version "3.1.8"
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82"
@ -3107,7 +2743,7 @@ cross-fetch@^3.1.5:
dependencies:
node-fetch "^2.6.12"
cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
cross-spawn@^7.0.2, cross-spawn@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
@ -3181,7 +2817,7 @@ debug@2.6.9:
dependencies:
ms "2.0.0"
debug@4, debug@^4, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4:
debug@4, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@ -3265,11 +2901,6 @@ depd@2.0.0:
resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
deprecation@^2.0.0, deprecation@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
destroy@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
@ -3337,11 +2968,6 @@ dotenv@^16.0.3:
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f"
integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==
eastasianwidth@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb"
integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==
ecdsa-sig-formatter@1.0.11:
version "1.0.11"
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
@ -3366,11 +2992,6 @@ elevenlabs@^0.5.0:
qs "6.11.2"
url-join "4.0.1"
emoji-regex@^10.2.1:
version "10.3.0"
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.3.0.tgz#76998b9268409eb3dae3de989254d456e70cfe23"
integrity sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==
emoji-regex@^8.0.0:
version "8.0.0"
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
@ -3410,11 +3031,6 @@ entities@^4.2.0:
resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
env-var@^7.3.1:
version "7.4.1"
resolved "https://registry.yarnpkg.com/env-var/-/env-var-7.4.1.tgz#88af75fe16cc11031000f88d4777802c6958a01c"
integrity sha512-H8Ga2SbXTQwt6MKEawWSvmxoH1+J6bnAXkuyE7eDvbGmrhIL2i+XGjzGM3DFHcJu8GY1zY9/AnBJY8uGQYPHiw==
es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0, es-abstract@^1.23.1, es-abstract@^1.23.2, es-abstract@^1.23.3:
version "1.23.3"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.23.3.tgz#8f0c5a35cd215312573c5a27c87dfd6c881a0aa0"
@ -4005,15 +3621,6 @@ fs-constants@^1.0.0:
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
fs-extra@^11.1.1, fs-extra@^11.2.0:
version "11.2.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b"
integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==
dependencies:
graceful-fs "^4.2.0"
jsonfile "^6.0.1"
universalify "^2.0.0"
fs-minipass@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@ -4066,20 +3673,6 @@ gauge@^3.0.0:
strip-ansi "^6.0.1"
wide-align "^1.1.2"
gauge@^4.0.3:
version "4.0.4"
resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce"
integrity sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==
dependencies:
aproba "^1.0.3 || ^2.0.0"
color-support "^1.1.3"
console-control-strings "^1.1.0"
has-unicode "^2.0.1"
signal-exit "^3.0.7"
string-width "^4.2.3"
strip-ansi "^6.0.1"
wide-align "^1.1.5"
generate-function@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f"
@ -4175,11 +3768,6 @@ gopd@^1.0.1:
dependencies:
get-intrinsic "^1.1.3"
graceful-fs@^4.1.6, graceful-fs@^4.2.0:
version "4.2.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
graphemer@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6"
@ -4390,11 +3978,6 @@ imurmurhash@^0.1.4:
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
indent-string@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
@ -4553,11 +4136,6 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
dependencies:
is-extglob "^2.1.1"
is-interactive@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-2.0.0.tgz#40c57614593826da1100ade6059778d597f16e90"
integrity sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==
is-invalid-path@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/is-invalid-path/-/is-invalid-path-0.1.0.tgz#307a855b3cf1a938b44ea70d2c61106053714f34"
@ -4655,11 +4233,6 @@ is-typed-array@^1.1.13:
dependencies:
which-typed-array "^1.1.14"
is-unicode-supported@^1.1.0, is-unicode-supported@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz#d824984b616c292a2e198207d4a609983842f714"
integrity sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==
is-valid-path@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/is-valid-path/-/is-valid-path-0.1.1.tgz#110f9ff74c37f663e1ec7915eb451f2db93ac9df"
@ -4709,11 +4282,6 @@ isexe@^2.0.0:
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
isexe@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-3.1.1.tgz#4a407e2bd78ddfb14bea0c27c6f7072dde775f0d"
integrity sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==
isomorphic-fetch@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4"
@ -4820,15 +4388,6 @@ json5@^2.2.3:
resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==
jsonfile@^6.0.1:
version "6.1.0"
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
dependencies:
universalify "^2.0.0"
optionalDependencies:
graceful-fs "^4.1.6"
jsonpointer@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559"
@ -4839,7 +4398,7 @@ jsonrepair@^3.7.0:
resolved "https://registry.yarnpkg.com/jsonrepair/-/jsonrepair-3.7.0.tgz#b4fddb9c8d29dd62263f4f037334099e28feac21"
integrity sha512-TwE50n4P4gdVfMQF2q+X+IGy4ntFfcuHHE8zjRyBcdtrRK0ORZsjOZD6zmdylk4p277nQBAlHgsEPWtMIQk4LQ==
jsonwebtoken@^9.0.0, jsonwebtoken@^9.0.2:
jsonwebtoken@^9.0.0:
version "9.0.2"
resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==
@ -5092,14 +4651,6 @@ lodash@^4.17.20, lodash@^4.17.21:
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
log-symbols@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-5.1.0.tgz#a20e3b9a5f53fac6aeb8e2bb22c07cf2c8f16d93"
integrity sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==
dependencies:
chalk "^5.0.0"
is-unicode-supported "^1.1.0"
logform@^2.3.2, logform@^2.4.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/logform/-/logform-2.6.0.tgz#8c82a983f05d6eaeb2d75e3decae7a768b2bf9b5"
@ -5129,11 +4680,6 @@ loose-envify@^1.4.0:
dependencies:
js-tokens "^3.0.0 || ^4.0.0"
lru-cache@^10.0.0:
version "10.2.2"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.2.tgz#48206bc114c1252940c41b25b41af5b545aca878"
integrity sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==
lru-cache@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
@ -5177,13 +4723,6 @@ media-typer@0.3.0:
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
memory-stream@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/memory-stream/-/memory-stream-1.0.0.tgz#481dfd259ccdf57b03ec2c9632960044180e73c2"
integrity sha512-Wm13VcsPIMdG96dzILfij09PvuS3APtcKNh7M28FsCA/w6+1mjR7hhPmfFNoilX9xU7wTdhsH5lJAm6XNzdtww==
dependencies:
readable-stream "^3.4.0"
merge-descriptors@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
@ -5440,16 +4979,6 @@ node-addon-api@^6.1.0:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
node-addon-api@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-7.1.0.tgz#71f609369379c08e251c558527a107107b5e0fdb"
integrity sha512-mNcltoe1R8o7STTegSOHdnJNN7s5EUvhoS7ShnTHDyOSd+8H+UdWODq6qSv67PjC8Zc5JRT8+oLAMCr0SIXw7g==
node-api-headers@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.1.0.tgz#3f9dd7bb10b29e1c3e3db675979605a308b2373c"
integrity sha512-ucQW+SbYCUPfprvmzBsnjT034IGRB2XK8rRc78BgjNKhTdFKgAwAmgW704bKIBmcYW48it0Gkjpkd39Azrwquw==
node-domexception@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
@ -5477,28 +5006,6 @@ node-html-parser@^6.1.1:
css-select "^5.1.0"
he "1.2.0"
node-llama-cpp@^2.8.0:
version "2.8.10"
resolved "https://registry.yarnpkg.com/node-llama-cpp/-/node-llama-cpp-2.8.10.tgz#6a4359a072c780f82fdad9b1112d1d4be7f4796a"
integrity sha512-00qX591hTNhgLGS3QDhoT40LFmLgLweyLiouxdNy8+X2YamUBBSZhT1ipG1STLKZ2IpITzSm53oXtH9qDvMfXQ==
dependencies:
chalk "^5.3.0"
chmodrp "^1.0.2"
cli-progress "^3.12.0"
cmake-js "^7.2.1"
cross-env "^7.0.3"
cross-spawn "^7.0.3"
env-var "^7.3.1"
fs-extra "^11.1.1"
log-symbols "^5.1.0"
node-addon-api "^7.0.0"
octokit "^3.1.0"
ora "^7.0.1"
simple-git "^3.19.1"
uuid "^9.0.0"
which "^4.0.0"
yargs "^17.7.2"
node-modules-regexp@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40"
@ -5556,16 +5063,6 @@ npmlog@^5.0.1:
gauge "^3.0.0"
set-blocking "^2.0.0"
npmlog@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830"
integrity sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==
dependencies:
are-we-there-yet "^3.0.0"
console-control-strings "^1.1.0"
gauge "^4.0.3"
set-blocking "^2.0.0"
nth-check@^2.0.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d"
@ -5645,22 +5142,6 @@ object.values@^1.1.6, object.values@^1.1.7:
define-properties "^1.2.1"
es-object-atoms "^1.0.0"
octokit@^3.1.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/octokit/-/octokit-3.2.0.tgz#2e75bb0a5f0511aa37ee20c2714206ed0d09e2bf"
integrity sha512-f25eJ/8ITwF2BdwymOjK9I5ll9Azt8UbfHE2u5ho0gVdgfpIZkUgMGbQjbvgOYGbtIAYxh7ghH3BUbZrYal1Gw==
dependencies:
"@octokit/app" "^14.0.2"
"@octokit/core" "^5.0.0"
"@octokit/oauth-app" "^6.0.0"
"@octokit/plugin-paginate-graphql" "^4.0.0"
"@octokit/plugin-paginate-rest" "^9.0.0"
"@octokit/plugin-rest-endpoint-methods" "^10.0.0"
"@octokit/plugin-retry" "^6.0.0"
"@octokit/plugin-throttling" "^8.0.0"
"@octokit/request-error" "^5.0.0"
"@octokit/types" "^12.0.0"
ollama@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/ollama/-/ollama-0.5.0.tgz#cb9bc709d4d3278c9f484f751b0d9b98b06f4859"
@ -5689,7 +5170,7 @@ one-time@^1.0.0:
dependencies:
fn.name "1.x.x"
onetime@^5.1.0, onetime@^5.1.2:
onetime@^5.1.2:
version "5.1.2"
resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
@ -5795,21 +5276,6 @@ optionator@^0.9.3:
type-check "^0.4.0"
word-wrap "^1.2.5"
ora@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/ora/-/ora-7.0.1.tgz#cdd530ecd865fe39e451a0e7697865669cb11930"
integrity sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==
dependencies:
chalk "^5.3.0"
cli-cursor "^4.0.0"
cli-spinners "^2.9.0"
is-interactive "^2.0.0"
is-unicode-supported "^1.3.0"
log-symbols "^5.1.0"
stdin-discarder "^0.1.0"
string-width "^6.1.0"
strip-ansi "^7.1.0"
os-tmpdir@~1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@ -6311,14 +5777,6 @@ resolve@^2.0.0-next.5:
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
restore-cursor@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-4.0.0.tgz#519560a4318975096def6e609d44100edaa4ccb9"
integrity sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==
dependencies:
onetime "^5.1.0"
signal-exit "^3.0.2"
retry@^0.13.1:
version "0.13.1"
resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658"
@ -6521,7 +5979,7 @@ side-channel@^1.0.4, side-channel@^1.0.6:
get-intrinsic "^1.2.4"
object-inspect "^1.13.1"
signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7:
signal-exit@^3.0.0, signal-exit@^3.0.3:
version "3.0.7"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
@ -6545,15 +6003,6 @@ simple-get@^4.0.0, simple-get@^4.0.1:
once "^1.3.1"
simple-concat "^1.0.0"
simple-git@^3.19.1:
version "3.24.0"
resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-3.24.0.tgz#33a8c88dc6fa74e53eaf3d6bfc27d0182a49ec00"
integrity sha512-QqAKee9Twv+3k8IFOFfPB2hnk6as6Y6ACUpwCtQvRYBAes23Wv3SZlHVobAzqcE8gfsisCvPw3HGW3HYM+VYYw==
dependencies:
"@kwsites/file-exists" "^1.1.1"
"@kwsites/promise-deferred" "^1.1.1"
debug "^4.3.4"
simple-swizzle@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a"
@ -6598,13 +6047,6 @@ statuses@2.0.1:
resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
stdin-discarder@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/stdin-discarder/-/stdin-discarder-0.1.0.tgz#22b3e400393a8e28ebf53f9958f3880622efde21"
integrity sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==
dependencies:
bl "^5.0.0"
stoppable@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b"
@ -6644,15 +6086,6 @@ string-natural-compare@^3.0.1:
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1"
string-width@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-6.1.0.tgz#96488d6ed23f9ad5d82d13522af9e4c4c3fd7518"
integrity sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==
dependencies:
eastasianwidth "^0.2.0"
emoji-regex "^10.2.1"
strip-ansi "^7.0.1"
string.prototype.matchall@^4.0.10:
version "4.0.11"
resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz#1092a72c59268d2abaad76582dccc687c0297e0a"
@ -6720,13 +6153,6 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1:
dependencies:
ansi-regex "^5.0.1"
strip-ansi@^7.0.1, strip-ansi@^7.1.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45"
integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==
dependencies:
ansi-regex "^6.0.1"
strip-final-newline@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
@ -6850,7 +6276,7 @@ tar-stream@^3.1.5:
fast-fifo "^1.2.0"
streamx "^2.15.0"
tar@^6.1.11, tar@^6.2.0:
tar@^6.1.11:
version "6.2.1"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"
integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==
@ -7063,24 +6489,6 @@ undici@~5.28.4:
dependencies:
"@fastify/busboy" "^2.0.0"
universal-github-app-jwt@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/universal-github-app-jwt/-/universal-github-app-jwt-1.1.2.tgz#8c1867a394d7d9d42cda34f11d1bcb023797d8df"
integrity sha512-t1iB2FmLFE+yyJY9+3wMx0ejB+MQpEVkH0gQv7dR6FZyltyq+ZZO0uDpbopxhrZ3SLEO4dCEkIujOMldEQ2iOA==
dependencies:
"@types/jsonwebtoken" "^9.0.0"
jsonwebtoken "^9.0.2"
universal-user-agent@^6.0.0:
version "6.0.1"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa"
integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==
universalify@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d"
integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==
unpipe@1.0.0, unpipe@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
@ -7093,7 +6501,7 @@ uri-js@^4.2.2, uri-js@^4.4.1:
dependencies:
punycode "^2.1.0"
url-join@4.0.1, url-join@^4.0.1:
url-join@4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7"
integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==
@ -7235,21 +6643,14 @@ which-typed-array@^1.1.14, which-typed-array@^1.1.15, which-typed-array@^1.1.9:
gopd "^1.0.1"
has-tostringtag "^1.0.2"
which@^2.0.1, which@^2.0.2:
which@^2.0.1:
version "2.0.2"
resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
dependencies:
isexe "^2.0.0"
which@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/which/-/which-4.0.0.tgz#cd60b5e74503a3fbcfbf6cd6b4138a8bae644c1a"
integrity sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==
dependencies:
isexe "^3.1.1"
wide-align@^1.1.2, wide-align@^1.1.5:
wide-align@^1.1.2:
version "1.1.5"
resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3"
integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==