Compare commits

...

7 commits

Author SHA1 Message Date
Timothy Carambat
f64f2944d5
Merge branch 'master' into ocr-parse-pdfs 2025-02-14 11:54:32 -08:00
timothycarambat
db4208ba00 bump dev 2025-02-14 11:33:20 -08:00
Mert Cobanov
cf8c7c28a1
Turkish translations () 2025-02-14 11:10:50 -08:00
timothycarambat
3e616fe53f norm pkgs 2025-02-14 10:33:26 -08:00
timothycarambat
64b3210db2 build our own worker fanout and wrapper 2025-02-14 10:31:06 -08:00
Timothy Carambat
b6d3a411b1
Add querySelectorAll capability to web-scraping block ()
* Add `querySelectorAll` capability to web-scraping block

* patches and fallbacks

* fix styles of text in web scraping block

---------

Co-authored-by: shatfield4 <seanhatfield5@gmail.com>
2025-02-13 16:11:15 -08:00
Adam Setch
ea8454ea79
chore: rename Gitlab to GitLab ()
* chore: rename `Gitlab` to `GitLab`

Signed-off-by: Adam Setch <adam.setch@outlook.com>

* undo code changes - breaks refs

---------

Signed-off-by: Adam Setch <adam.setch@outlook.com>
Co-authored-by: timothycarambat <rambat1010@gmail.com>
2025-02-13 12:13:37 -08:00
21 changed files with 775 additions and 167 deletions
.github/workflows
collector
  index.js
  package.json
  processLink
  processSingleFile/convert/asPDF
  utils/OCRLoader
  yarn.lock
frontend/src
  components/DataConnectorOption/media
  pages/Admin/AgentBuilder
    BlockList
    nodes/WebScrapingNode
locales
server
  models
  package.json
  storage/models
  utils
    agentFlows/executors
    collectorApi
  yarn.lock

View file

@ -6,7 +6,7 @@ concurrency:
on:
push:
branches: ['agent-builder'] # put your current branch to create a build. Core team only.
branches: ['ocr-parse-pdfs'] # put your current branch to create a build. Core team only.
paths-ignore:
- '**.md'
- 'cloud-deployments/*'

View file

@ -83,9 +83,9 @@ app.post(
"/util/get-link",
[verifyPayloadIntegrity],
async function (request, response) {
const { link } = reqBody(request);
const { link, captureAs = "text" } = reqBody(request);
try {
const { success, content = null } = await getLinkText(link);
const { success, content = null } = await getLinkText(link, captureAs);
response.status(200).json({ url: link, success, content });
} catch (e) {
console.error(e);
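
A minimal caller sketch for the updated endpoint (not part of the diff). The collector port and the handling of the verifyPayloadIntegrity middleware are assumptions for illustration; the payload shape follows the handler above.

// Hypothetical caller — assumes the collector is listening locally on :8888
// and that the verifyPayloadIntegrity check is satisfied elsewhere.
async function fetchLinkContent() {
  const res = await fetch("http://localhost:8888/util/get-link", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      link: "https://example.com/article",
      captureAs: "html", // "text" (default) or "html"
    }),
  });
  return await res.json(); // { url, success, content }
}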

View file

@ -19,7 +19,7 @@
"@xenova/transformers": "^2.11.0",
"bcrypt": "^5.1.0",
"body-parser": "^1.20.2",
"canvas": "2.11.2",
"canvas": "^2.11.2",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"epub2": "^3.0.2",

View file

@ -6,9 +6,20 @@ const { writeToServerDocuments } = require("../../utils/files");
const { tokenizeString } = require("../../utils/tokenizer");
const { default: slugify } = require("slugify");
async function scrapeGenericUrl(link, textOnly = false) {
console.log(`-- Working URL ${link} --`);
const content = await getPageContent(link);
/**
* Scrape a generic URL and return the content in the specified format
* @param {string} link - The URL to scrape
* @param {('html' | 'text')} captureAs - The format to capture the page content as
* @param {boolean} processAsDocument - Whether to process the content as a document or return the content directly
* @returns {Promise<Object>} - The content of the page
*/
async function scrapeGenericUrl(
link,
captureAs = "text",
processAsDocument = true
) {
console.log(`-- Working URL ${link} => (${captureAs}) --`);
const content = await getPageContent(link, captureAs);
if (!content.length) {
console.error(`Resulting URL content was empty at ${link}.`);
@ -19,7 +30,7 @@ async function scrapeGenericUrl(link, textOnly = false) {
};
}
if (textOnly) {
if (!processAsDocument) {
return {
success: true,
content,
@ -52,7 +63,13 @@ async function scrapeGenericUrl(link, textOnly = false) {
return { success: true, reason: null, documents: [document] };
}
async function getPageContent(link) {
/**
* Get the content of a page
* @param {string} link - The URL to get the content of
* @param {('html' | 'text')} captureAs - The format to capture the page content as
* @returns {Promise<string>} - The content of the page
*/
async function getPageContent(link, captureAs = "text") {
try {
let pageContents = [];
const loader = new PuppeteerWebBaseLoader(link, {
@ -64,7 +81,11 @@ async function getPageContent(link) {
waitUntil: "networkidle2",
},
async evaluate(page, browser) {
const result = await page.evaluate(() => document.body.innerText);
const result = await page.evaluate((captureAs) => {
if (captureAs === "text") return document.body.innerText;
if (captureAs === "html") return document.documentElement.innerHTML;
return document.body.innerText;
}, captureAs);
await browser.close();
return result;
},
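
A usage sketch for the new signature (illustrative only, mirroring the parameters documented above): captureAs picks innerText vs. innerHTML in the Puppeteer evaluate step, and processAsDocument=false returns the raw content instead of writing a document.

// Sketch, not part of the diff — call from an async context.
async function example() {
  const { success, content } = await scrapeGenericUrl(
    "https://example.com/pricing", // link
    "html",                        // captureAs: "text" | "html"
    false                          // processAsDocument: return content directly
  );
  if (success) console.log(content.slice(0, 200));
}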

View file

@ -6,9 +6,15 @@ async function processLink(link) {
return await scrapeGenericUrl(link);
}
async function getLinkText(link) {
/**
* Get the text content of a link
* @param {string} link - The link to get the text content of
* @param {('html' | 'text' | 'json')} captureAs - The format to capture the page content as
* @returns {Promise<{success: boolean, content: string}>} - Response from collector
*/
async function getLinkText(link, captureAs = "text") {
if (!validURL(link)) return { success: false, reason: "Not a valid URL." };
return await scrapeGenericUrl(link, true);
return await scrapeGenericUrl(link, captureAs, false);
}
module.exports = {
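
The two exports now diverge as sketched below (illustrative): processLink keeps the document-ingestion path, while getLinkText forwards captureAs and returns content directly.

// Sketch based on the functions above — call from an async context.
async function compareEntryPoints() {
  const doc = await processLink("https://example.com");         // { success, reason, documents }
  const raw = await getLinkText("https://example.com", "html"); // { success, content }
  return { doc, raw };
}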

View file

@ -1,39 +0,0 @@
class NodeCanvasFactory {
constructor() {
this.Canvas = null;
}
async init() {
this.Canvas = await import("canvas");
this.Image = this.Canvas.Image;
}
create(
width,
height,
transparent
) {
const canvas = this.Canvas.createCanvas(width, height);
const context = canvas.getContext("2d", { alpha: transparent });
if (transparent) context.clearRect(0, 0, width, height);
return {
canvas,
context,
};
}
reset(canvasAndContext, width, height) {
canvasAndContext.canvas.width = width;
canvasAndContext.canvas.height = height;
}
destroy(canvasAndContext) {
canvasAndContext.canvas.width = 0;
canvasAndContext.canvas.height = 0;
canvasAndContext.canvas = null;
canvasAndContext.context = null;
}
}
module.exports = NodeCanvasFactory;

View file

@ -1,22 +1,12 @@
const fs = require("fs").promises;
const path = require("path");
const NodeCanvasFactory = require("./CanvasFactory");
class PDFLoader {
constructor(filePath, { splitPages = true } = {}) {
this.filePath = filePath;
this.splitPages = splitPages;
this.metadata = {};
}
/**
* Loads a PDF file and returns an array of documents.
* This function is reserved to parsing for DIGITAL documents - scanned documents are not supported in this function
* For scanned documents, use the `asOCR` function instead.
* @returns {Promise<{pageContent: string, metadata: object}[]>} An array of documents with page content and metadata.
*/
async load() {
const documents = [];
const buffer = await fs.readFile(this.filePath);
const { getDocument, version } = await this.getPdfJS();
@ -28,21 +18,15 @@ class PDFLoader {
}).promise;
const meta = await pdf.getMetadata().catch(() => null);
this.metadata = {
source: this.filePath,
pdf: {
version,
info: meta?.info,
metadata: meta?.metadata,
totalPages: pdf.numPages,
},
};
const documents = [];
for (let i = 1; i <= pdf.numPages; i += 1) {
const page = await pdf.getPage(i);
const content = await page.getTextContent();
if (content.items.length === 0) continue;
if (content.items.length === 0) {
continue;
}
let lastY;
const textItems = [];
@ -61,88 +45,46 @@ class PDFLoader {
documents.push({
pageContent: text.trim(),
metadata: {
...this.metadata,
source: this.filePath,
pdf: {
version,
info: meta?.info,
metadata: meta?.metadata,
totalPages: pdf.numPages,
},
loc: { pageNumber: i },
},
});
}
if (this.splitPages) return documents;
if (documents.length === 0) return [];
if (this.splitPages) {
return documents;
}
if (documents.length === 0) {
return [];
}
return [
{
pageContent: documents.map((doc) => doc.pageContent).join("\n\n"),
metadata: this.metadata,
metadata: {
source: this.filePath,
pdf: {
version,
info: meta?.info,
metadata: meta?.metadata,
totalPages: pdf.numPages,
},
},
},
];
}
/**
* Loads a PDF file and returns an array of documents.
* This function is reserved to parsing for SCANNED documents - digital documents are not supported in this function
* For digital documents, use the `load` function instead.
* @returns {Promise<{pageContent: string, metadata: object}[]>} An array of documents with page content and metadata.
*/
async asOCR() {
const documents = [];
const pdfjs = await import("pdf-parse/lib/pdf.js/v2.0.550/build/pdf.js");
const buffer = await fs.readFile(this.filePath);
const canvasFactory = new NodeCanvasFactory();
await canvasFactory.init();
global.Image = canvasFactory.Image;
const pdfDocument = await pdfjs.getDocument({
data: new Uint8Array(buffer),
canvasFactory,
}).promise;
async function getPageAsBuffer(pageNumber, scale = 1) {
const page = await pdfDocument.getPage(pageNumber);
const viewport = page.getViewport(scale);
const { canvas, context } = canvasFactory.create(
viewport.width,
viewport.height,
false
);
await page.render({
canvasFactory,
canvasContext: context,
viewport,
}).promise;
return canvas.toBuffer();
}
const { createWorker, setLogging, OEM } = require("tesseract.js");
setLogging(false);
const worker = await createWorker("eng", OEM.LSTM_ONLY, {
cachePath: path.resolve(__dirname, `../../../../storage/tmp`),
});
for (let i = 1; i <= pdfDocument.numPages; i += 1) {
const image = await getPageAsBuffer(i, 5);
const { data } = await worker.recognize(image, {}, "text");
documents.push({
pageContent: data.text,
metadata: {
...this.metadata,
loc: { pageNumber: i },
},
});
}
return documents;
}
async getPdfJS() {
try {
const pdfjs = await import("pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js");
return {
getDocument: pdfjs.getDocument,
version: pdfjs.version,
};
return { getDocument: pdfjs.getDocument, version: pdfjs.version };
} catch (e) {
console.error(e);
throw new Error(

View file

@ -7,6 +7,7 @@ const {
const { tokenizeString } = require("../../../utils/tokenizer");
const { default: slugify } = require("slugify");
const PDFLoader = require("./PDFLoader");
const OCRLoader = require("../../../utils/OCRLoader");
async function asPdf({ fullFilePath = "", filename = "" }) {
const pdfLoader = new PDFLoader(fullFilePath, {
@ -19,9 +20,9 @@ async function asPdf({ fullFilePath = "", filename = "" }) {
if (docs.length === 0) {
console.log(
`[PDFLoader] No text content found for ${filename}. Attempting OCR parse.`
`[asPDF] No text content found for ${filename}. Will attempt OCR parse.`
);
docs = await pdfLoader.asOCR();
docs = await new OCRLoader().ocrPDF(fullFilePath);
}
for (const doc of docs) {
@ -35,9 +36,7 @@ async function asPdf({ fullFilePath = "", filename = "" }) {
}
if (!pageContent.length) {
console.error(
`[PDFLoader] Resulting text content was empty for ${filename}.`
);
console.error(`[asPDF] Resulting text content was empty for ${filename}.`);
trashFile(fullFilePath);
return {
success: false,

View file

@ -0,0 +1,52 @@
/**
* This is a factory for creating a canvas and context in Node.js.
* It is used to create a canvas and context for the PDFLoader, turning the PDF into an image
* so we can later use the image to extract text from the PDF.
*/
class NodeCanvasFactory {
constructor() {
this.CanvasModule = null;
}
async init() {
this.CanvasModule = await import("canvas");
this.Image = this.CanvasModule.Image;
}
/**
* Creates a canvas and context for the PDFLoader
* @param {number} width - The width of the canvas
* @param {number} height - The height of the canvas
* @param {boolean} transparent - Whether the canvas is transparent
* @returns {{canvas: HTMLCanvasElement, context: CanvasRenderingContext2D}} - The canvas and context
*/
create(width, height, transparent = false) {
const canvas = this.CanvasModule.createCanvas(width, height);
const context = canvas.getContext("2d", { alpha: transparent });
if (transparent) context.clearRect(0, 0, width, height);
return {
canvas,
context,
};
}
/**
* Required for the PDFLoader pdfjs integration - do not remove or use directly.
*/
reset(canvasAndContext, width, height) {
canvasAndContext.canvas.width = width;
canvasAndContext.canvas.height = height;
}
/**
* Required for the PDFLoader pdfjs integration - do not remove or use directly.
*/
destroy(canvasAndContext) {
canvasAndContext.canvas.width = 0;
canvasAndContext.canvas.height = 0;
canvasAndContext.canvas = null;
canvasAndContext.context = null;
}
}
module.exports = NodeCanvasFactory;
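
A rough sketch of how this factory is handed to pdf.js to rasterize a page (the OCRLoader below does this for real). The pdf.js import path is taken from the diff; everything else is illustrative.

const NodeCanvasFactory = require("./CanvasFactory");

// Render page 1 of a PDF buffer to a PNG buffer suitable for OCR.
async function renderFirstPage(buffer) {
  const pdfjs = await import("pdf-parse/lib/pdf.js/v2.0.550/build/pdf.js");
  const canvasFactory = new NodeCanvasFactory();
  await canvasFactory.init();

  const pdf = await pdfjs.getDocument({
    data: new Uint8Array(buffer),
    canvasFactory,
  }).promise;
  const page = await pdf.getPage(1);
  const viewport = page.getViewport(2); // this bundled pdf.js takes a bare scale number
  const { canvas, context } = canvasFactory.create(viewport.width, viewport.height, false);
  await page.render({ canvasFactory, canvasContext: context, viewport }).promise;
  return canvas.toBuffer();
}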

View file

@ -0,0 +1,190 @@
const fs = require("fs");
const os = require("os");
const path = require("path");
const NodeCanvasFactory = require("./CanvasFactory");
class OCRLoader {
constructor() {
this.cacheDir = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, `models`, `tesseract`)
: path.resolve(__dirname, `../../../server/storage/models/tesseract`)
);
}
log(text, ...args) {
console.log(`\x1b[36m[OCRLoader]\x1b[0m ${text}`, ...args);
}
/**
* Loads a PDF file and returns an array of documents.
* This function is reserved to parsing for SCANNED documents - digital documents are not supported in this function
* @returns {Promise<{pageContent: string, metadata: object}[]>} An array of documents with page content and metadata.
*/
async ocrPDF(
filePath,
{ maxExecutionTime = 300_000, batchSize = 10, maxWorkers = null } = {}
) {
if (
!filePath ||
!fs.existsSync(filePath) ||
!fs.statSync(filePath).isFile()
) {
this.log(`File ${filePath} does not exist. Skipping OCR.`);
return [];
}
const documentTitle = path.basename(filePath);
this.log(`Starting OCR of ${documentTitle}`);
const pdfjs = await import("pdf-parse/lib/pdf.js/v2.0.550/build/pdf.js");
let buffer = fs.readFileSync(filePath);
const canvasFactory = new NodeCanvasFactory();
await canvasFactory.init();
global.Image = canvasFactory.Image;
const pdfDocument = await pdfjs.getDocument({
data: new Uint8Array(buffer),
canvasFactory,
}).promise;
buffer = null;
const documents = [];
const meta = await pdfDocument.getMetadata().catch(() => null);
const metadata = {
source: filePath,
pdf: {
version: "v2.0.550",
info: meta?.info,
metadata: meta?.metadata,
totalPages: pdfDocument.numPages,
},
};
async function getPageAsBuffer(pageNumber, scale = 1) {
let canvas = null;
let context = null;
try {
const page = await pdfDocument.getPage(pageNumber);
const viewport = page.getViewport(scale);
({ canvas, context } = canvasFactory.create(
viewport.width,
viewport.height
));
await page.render({
canvasFactory,
canvasContext: context,
viewport,
}).promise;
return canvas.toBuffer();
} catch (e) {
this.log(`Error getting page as buffer: ${e.message}`);
return null;
} finally {
canvas = null;
context = null;
}
}
const { createWorker, OEM } = require("tesseract.js");
const BATCH_SIZE = batchSize;
const MAX_EXECUTION_TIME = maxExecutionTime;
const NUM_WORKERS = maxWorkers ?? Math.min(os.cpus().length, 4);
const totalPages = pdfDocument.numPages;
const workerPool = await Promise.all(
Array(NUM_WORKERS)
.fill(0)
.map(() =>
createWorker("eng", OEM.LSTM_ONLY, {
cachePath: this.cacheDir,
})
)
);
const startTime = Date.now();
try {
this.log("Bootstrapping OCR completed successfully!", {
MAX_EXECUTION_TIME_MS: MAX_EXECUTION_TIME,
BATCH_SIZE,
MAX_CONCURRENT_WORKERS: NUM_WORKERS,
TOTAL_PAGES: totalPages,
});
const timeoutPromise = new Promise((_, reject) => {
setTimeout(() => {
reject(
new Error(
`OCR job took too long to complete (${
MAX_EXECUTION_TIME / 1000
} seconds)`
)
);
}, MAX_EXECUTION_TIME);
});
const processPages = async () => {
for (
let startPage = 1;
startPage <= totalPages;
startPage += BATCH_SIZE
) {
const endPage = Math.min(startPage + BATCH_SIZE - 1, totalPages);
const pageNumbers = Array.from(
{ length: endPage - startPage + 1 },
(_, i) => startPage + i
);
this.log(`Working on pages ${startPage} - ${endPage}`);
const pageQueue = [...pageNumbers];
const results = [];
const workerPromises = workerPool.map(async (worker, workerIndex) => {
while (pageQueue.length > 0) {
const pageNum = pageQueue.shift();
this.log(
`\x1b[34m[Worker ${
workerIndex + 1
}]\x1b[0m assigned pg${pageNum}`
);
const imageBuffer = await getPageAsBuffer(pageNum, 5);
const { data } = await worker.recognize(imageBuffer, {}, "text");
this.log(
`\x1b[34m[Worker ${
workerIndex + 1
}]\x1b[0m completed pg${pageNum}`
);
results.push({
pageContent: data.text,
metadata: {
...metadata,
loc: { pageNumber: pageNum },
},
});
}
});
await Promise.all(workerPromises);
documents.push(
...results.sort(
(a, b) => a.metadata.loc.pageNumber - b.metadata.loc.pageNumber
)
);
}
return documents;
};
await Promise.race([timeoutPromise, processPages()]);
} catch (e) {
this.log(`Error: ${e.message}`);
} finally {
global.Image = undefined;
await Promise.all(workerPool.map((worker) => worker.terminate()));
}
this.log(`Completed OCR of ${documentTitle}!`, {
documentsParsed: documents.length,
totalPages: totalPages,
executionTime: `${((Date.now() - startTime) / 1000).toFixed(2)}s`,
});
return documents;
}
}
module.exports = OCRLoader;
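
A usage sketch based on the ocrPDF signature above; the file path is hypothetical and the options shown are the documented defaults plus an explicit worker cap.

const OCRLoader = require("./utils/OCRLoader");

// Sketch, not part of the diff.
(async () => {
  const docs = await new OCRLoader().ocrPDF("/tmp/scanned-invoice.pdf", {
    maxExecutionTime: 300_000, // abort the whole OCR job after 5 minutes
    batchSize: 10,             // pages OCR'd per batch
    maxWorkers: 2,             // cap the tesseract.js worker pool
  });
  console.log(`${docs.length} pages parsed`);
  // => [{ pageContent, metadata: { source, pdf, loc: { pageNumber } } }, ...]
})();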

View file

@ -793,7 +793,7 @@ camelcase@6:
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
canvas@2.11.2, canvas@^2.11.2:
canvas@^2.11.2:
version "2.11.2"
resolved "https://registry.yarnpkg.com/canvas/-/canvas-2.11.2.tgz#553d87b1e0228c7ac0fc72887c3adbac4abbd860"
integrity sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw==
@ -2785,11 +2785,6 @@ path-type@^4.0.0:
resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
path2d@^0.2.0:
version "0.2.2"
resolved "https://registry.yarnpkg.com/path2d/-/path2d-0.2.2.tgz#cc85d61ed7827e7863a2ee36713d4b5315a3d85d"
integrity sha512-+vnG6S4dYcYxZd+CZxzXCNKdELYZSKfohrk98yajCo1PtRoDgCTrrwOvK1GT0UoAdVszagDVllQc0U1vaX4NUQ==
pdf-parse@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/pdf-parse/-/pdf-parse-1.1.1.tgz#745e07408679548b3995ff896fd38e96e19d14a7"
@ -2798,14 +2793,6 @@ pdf-parse@^1.1.1:
debug "^3.1.0"
node-ensure "^0.0.0"
pdfjs-dist@4.2.67:
version "4.2.67"
resolved "https://registry.yarnpkg.com/pdfjs-dist/-/pdfjs-dist-4.2.67.tgz#dd2a65a4b00d95cd4bc2c1f6a27c5e9eb31d512a"
integrity sha512-rJmuBDFpD7cqC8WIkQUEClyB4UAH05K4AsyewToMTp2gSy3Rrx8c1ydAVqlJlGv3yZSOrhEERQU/4ScQQFlLHA==
optionalDependencies:
canvas "^2.11.2"
path2d "^0.2.0"
peberminta@^0.9.0:
version "0.9.0"
resolved "https://registry.yarnpkg.com/peberminta/-/peberminta-0.9.0.tgz#8ec9bc0eb84b7d368126e71ce9033501dca2a352"

View file

@ -1,12 +1,12 @@
import GitHub from "./github.svg";
import Gitlab from "./gitlab.svg";
import GitLab from "./gitlab.svg";
import YouTube from "./youtube.svg";
import Link from "./link.svg";
import Confluence from "./confluence.jpeg";
const ConnectorImages = {
github: GitHub,
gitlab: Gitlab,
gitlab: GitLab,
youtube: YouTube,
websiteDepth: Link,
confluence: Confluence,

View file

@ -126,6 +126,8 @@ const BLOCK_INFO = {
description: "Scrape content from a webpage",
defaultConfig: {
url: "",
captureAs: "text",
querySelector: "",
resultVariable: "",
},
getSummary: (config) => config.url || "No URL specified",
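
For illustration, a web-scraping block instance using the new defaults above might look like this; the surrounding block/step shape is an assumption, only the config keys come from the diff.

// Hypothetical block instance — field names from defaultConfig above.
const webScrapingBlock = {
  type: "webScraping", // assumed block identifier
  config: {
    url: "https://example.com/blog",
    captureAs: "querySelector", // "text" | "html" | "querySelector"
    querySelector: ".article-content",
    resultVariable: "scrapedText",
  },
};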

View file

@ -25,6 +25,48 @@ export default function WebScrapingNode({
/>
</div>
<div>
<label className="block text-sm font-medium text-theme-text-primary mb-2">
Capture Page Content As
</label>
<select
value={config.captureAs}
onChange={(e) => onConfigChange({ captureAs: e.target.value })}
className="w-full border-none bg-theme-settings-input-bg text-theme-text-primary text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none p-2.5"
>
{[
{ label: "Text content only", value: "text" },
{ label: "Raw HTML", value: "html" },
{ label: "CSS Query Selector", value: "querySelector" },
].map((captureAs) => (
<option
key={captureAs.value}
value={captureAs.value}
className="bg-theme-settings-input-bg"
>
{captureAs.label}
</option>
))}
</select>
</div>
{config.captureAs === "querySelector" && (
<div>
<label className="block text-sm font-medium text-theme-text-primary mb-2">
Query Selector
</label>
<p className="text-xs text-theme-text-secondary mb-2">
Enter a valid CSS selector to scrape the content of the page.
</p>
<input
value={config.querySelector}
onChange={(e) => onConfigChange({ querySelector: e.target.value })}
placeholder=".article-content, #content, .main-content, etc."
className="w-full border-none bg-theme-settings-input-bg text-theme-text-primary text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none p-2.5"
/>
</div>
)}
<div>
<label className="block text-sm font-medium text-theme-text-primary mb-2">
Result Variable

locales/README.tr-TR.md Normal file
View file

@ -0,0 +1,269 @@
<a name="readme-top"></a>
<p align="center">
<a href="https://anythingllm.com"><img src="https://github.com/Mintplex-Labs/anything-llm/blob/master/images/wordmark.png?raw=true" alt="AnythingLLM logo"></a>
</p>
<div align='center'>
<a href="https://trendshift.io/repositories/2415" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2415" alt="Mintplex-Labs%2Fanything-llm | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</div>
<p align="center">
<b>AnythingLLM:</b> Aradığınız hepsi bir arada yapay zeka uygulaması.<br />
Belgelerinizle sohbet edin, yapay zeka ajanlarını kullanın, son derece özelleştirilebilir, çok kullanıcılı ve zahmetsiz kurulum!
</p>
<p align="center">
<a href="https://discord.gg/6UyHPeGZAC" target="_blank">
<img src="https://img.shields.io/badge/chat-mintplex_labs-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAAIGNIUk0AAHomAACAhAAA+gAAAIDoAAB1MAAA6mAAADqYAAAXcJy6UTwAAAH1UExURQAAAP////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////r6+ubn5+7u7/3+/v39/enq6urq6/v7+97f39rb26eoqT1BQ0pOT4+Rkuzs7cnKykZKS0NHSHl8fdzd3ejo6UxPUUBDRdzc3RwgIh8jJSAkJm5xcvHx8aanqB4iJFBTVezt7V5hYlJVVuLj43p9fiImKCMnKZKUlaaoqSElJ21wcfT09O3u7uvr6zE0Nr6/wCUpK5qcnf7+/nh7fEdKTHx+f0tPUOTl5aipqiouMGtubz5CRDQ4OsTGxufn515hY7a3uH1/gXBydIOFhlVYWvX29qaoqCQoKs7Pz/Pz87/AwUtOUNfY2dHR0mhrbOvr7E5RUy8zNXR2d/f39+Xl5UZJSx0hIzQ3Odra2/z8/GlsbaGjpERHSezs7L/BwScrLTQ4Odna2zM3Obm7u3x/gKSmp9jZ2T1AQu/v71pdXkVISr2+vygsLiInKTg7PaOlpisvMcXGxzk8PldaXPLy8u7u7rm6u7S1tsDBwvj4+MPExbe4ueXm5s/Q0Kyf7ewAAAAodFJOUwAABClsrNjx/QM2l9/7lhmI6jTB/kA1GgKJN+nea6vy/MLZQYeVKK3rVA5tAAAAAWJLR0QB/wIt3gAAAAd0SU1FB+cKBAAmMZBHjXIAAAISSURBVDjLY2CAAkYmZhZWNnYODnY2VhZmJkYGVMDIycXNw6sBBbw8fFycyEoYGfkFBDVQgKAAPyMjQl5IWEQDDYgIC8FUMDKKsmlgAWyiEBWMjGJY5YEqxMAqGMWFNXAAYXGgAkYJSQ2cQFKCkYFRShq3AmkpRgYJbghbU0tbB0Tr6ukbgGhDI10gySfBwCwDUWBsYmpmDqQtLK2sbTQ0bO3sHYA8GWYGWWj4WTs6Obu4ami4OTm7exhqeHp5+4DCVJZBDmqdr7ufn3+ArkZgkJ+fU3CIRmgYWFiOARYGvo5OQUHhEUAFTkF+kVHRsLBgkIeyYmLjwoOc4hMSk5JTnINS06DC8gwcEEZ6RqZGlpOfc3ZObl5+gZ+TR2ERWFyBQQFMF5eklmqUpQb5+ReU61ZUOvkFVVXXQBSAraitq29o1GiKcfLzc29u0mjxBzq0tQ0kww5xZHtHUGeXhkZhdxBYgZ4d0LI6c4gjwd7siQQraOp1AivQ6CuAKZCDBBRQQQNQgUb/BGf3cqCCiZOcnCe3QQIKHNRTpk6bDgpZjRkzg3pBQTBrdtCcuZCgluAD0vPmL1gIdvSixUuWgqNs2YJ+DUhkEYxuggkGmOQUcckrioPTJCOXEnZ5JS5YslbGnuyVERlDDFvGEUPOWvwqaH6RVkHKeuDMK6SKnHlVhTgx8jeTmqy6Eij7K6nLqiGyPwChsa1MUrnq1wAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMy0xMC0wNFQwMDozODo0OSswMDowMB9V0a8AAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjMtMTAtMDRUMDA6Mzg6NDkrMDA6MDBuCGkTAAAAKHRFWHRkYXRlOnRpbWVzdGFtcAAyMDIzLTEwLTA0VDAwOjM4OjQ5KzAwOjAwOR1IzAAAAABJRU5ErkJggg==" alt="Discord">
</a> |
<a href="https://github.com/Mintplex-Labs/anything-llm/blob/master/LICENSE" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=white" alt="License">
</a> |
<a href="https://docs.anythingllm.com" target="_blank">
Docs
</a> |
<a href="https://my.mintplexlabs.com/aio-checkout?product=anythingllm" target="_blank">
Hosted Instance
</a>
</p>
<p align="center">
<b>English</b> · <a href='./locales/README.zh-CN.md'>简体中文</a> · <a href='./locales/README.ja-JP.md'>日本語</a> · <a href='./locales/README.tr-TR.md'>Turkish</a>
</p>
<p align="center">
👉 Masaüstü için AnythingLLM (Mac, Windows ve Linux)! <a href="https://anythingllm.com/download" target="_blank"> Şimdi İndir</a>
</p>
Herhangi bir belgeyi, kaynağı veya içeriği sohbet sırasında herhangi bir büyük dil modelinin referans olarak kullanabileceği bir bağlama dönüştürmenizi sağlayan tam kapsamlı bir uygulama. Bu uygulama, kullanmak istediğiniz LLM veya Vektör Veritabanını seçmenize olanak tanırken, çok kullanıcılı yönetim ve yetkilendirme desteği de sunar.
![Mesajlaşma](https://github.com/Mintplex-Labs/anything-llm/assets/16845892/cfc5f47c-bd91-4067-986c-f3f49621a859)
<details>
<summary><kbd>Demoyu izle!</kbd></summary>
[![Video'yu izle](/images/youtube.png)](https://youtu.be/f95rGD9trL0)
</details>
### Ürün Genel Bakışı
AnythingLLM, ticari hazır büyük dil modellerini veya popüler açık kaynak LLM'leri ve vektör veritabanı çözümlerini kullanarak, hiçbir ödün vermeden özel bir ChatGPT oluşturmanıza olanak tanıyan tam kapsamlı bir uygulamadır. Bu uygulamayı yerel olarak çalıştırabilir veya uzaktan barındırarak sağladığınız belgelerle akıllı sohbetler gerçekleştirebilirsiniz.
AnythingLLM, belgelerinizi **"çalışma alanları" (workspaces)** adı verilen nesnelere ayırır. Bir çalışma alanı, bir sohbet dizisi gibi çalışır ancak belgelerinizi kapsülleyen bir yapı sunar. Çalışma alanları belgeleri paylaşabilir, ancak birbirleriyle iletişim kurmaz, böylece her çalışma alanının bağlamını temiz tutabilirsiniz.
## AnythingLLM'in Harika Özellikleri
- 🆕 [**Özel Yapay Zeka Ajanları**](https://docs.anythingllm.com/agent/custom/introduction)
- 🆕 [**Kod yazmadan AI Ajanı oluşturma aracı**](https://docs.anythingllm.com/agent-flows/overview)
- 🖼️ **Çoklu-mod desteği (hem kapalı kaynak hem de açık kaynak LLM'ler!)**
- 👤 Çok kullanıcılı destek ve yetkilendirme _(Yalnızca Docker sürümünde)_
- 🦾 Çalışma alanı içinde ajanlar (web'de gezinme vb.)
- 💬 [Web sitenize gömülebilir özel sohbet aracı](https://github.com/Mintplex-Labs/anythingllm-embed/blob/main/README.md) _(Yalnızca Docker sürümünde)_
- 📖 Çoklu belge türü desteği (PDF, TXT, DOCX vb.)
- Sade ve kullanışlı sohbet arayüzü, sürükle-bırak özelliği ve net kaynak gösterimi.
- %100 bulut konuşlandırmaya hazır.
- [Tüm popüler kapalı ve açık kaynak LLM sağlayıcılarıyla](#supported-llms-embedder-models-speech-models-and-vector-databases) uyumlu.
- Büyük belgeleri yönetirken zaman ve maliyet tasarrufu sağlayan dahili optimizasyonlar.
- Özel entegrasyonlar için tam kapsamlı Geliştirici API'si.
- Ve çok daha fazlası... Kurup keşfedin!
### Desteklenen LLM'ler, Embedding Modelleri, Konuşma Modelleri ve Vektör Veritabanları
**Büyük Dil Modelleri (LLMs):**
- [Any open-source llama.cpp compatible model](/server/storage/models/README.md#text-generation-llm-selection)
- [OpenAI](https://openai.com)
- [OpenAI (Generic)](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [AWS Bedrock](https://aws.amazon.com/bedrock/)
- [Anthropic](https://www.anthropic.com/)
- [NVIDIA NIM (chat models)](https://build.nvidia.com/explore/discover)
- [Google Gemini Pro](https://ai.google.dev/)
- [Hugging Face (chat models)](https://huggingface.co/)
- [Ollama (chat models)](https://ollama.ai/)
- [LM Studio (all models)](https://lmstudio.ai)
- [LocalAi (all models)](https://localai.io/)
- [Together AI (chat models)](https://www.together.ai/)
- [Fireworks AI (chat models)](https://fireworks.ai/)
- [Perplexity (chat models)](https://www.perplexity.ai/)
- [OpenRouter (chat models)](https://openrouter.ai/)
- [DeepSeek (chat models)](https://deepseek.com/)
- [Mistral](https://mistral.ai/)
- [Groq](https://groq.com/)
- [Cohere](https://cohere.com/)
- [KoboldCPP](https://github.com/LostRuins/koboldcpp)
- [LiteLLM](https://github.com/BerriAI/litellm)
- [Text Generation Web UI](https://github.com/oobabooga/text-generation-webui)
- [Apipie](https://apipie.ai/)
- [xAI](https://x.ai/)
- [Novita AI (chat models)](https://novita.ai/model-api/product/llm-api?utm_source=github_anything-llm&utm_medium=github_readme&utm_campaign=link)
**Embedder modelleri:**
- [AnythingLLM Native Embedder](/server/storage/models/README.md) (default)
- [OpenAI](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [LocalAi (all)](https://localai.io/)
- [Ollama (all)](https://ollama.ai/)
- [LM Studio (all)](https://lmstudio.ai)
- [Cohere](https://cohere.com/)
**Ses Transkripsiyon Modelleri:**
- [AnythingLLM Built-in](https://github.com/Mintplex-Labs/anything-llm/tree/master/server/storage/models#audiovideo-transcription) (default)
- [OpenAI](https://openai.com/)
**TTS (text-to-speech) desteği:**
- Native Browser Built-in (default)
- [PiperTTSLocal - runs in browser](https://github.com/rhasspy/piper)
- [OpenAI TTS](https://platform.openai.com/docs/guides/text-to-speech/voice-options)
- [ElevenLabs](https://elevenlabs.io/)
- Any OpenAI Compatible TTS service.
**STT (speech-to-text) desteği:**
- Native Browser Built-in (default)
**Vektör Databases:**
- [LanceDB](https://github.com/lancedb/lancedb) (default)
- [Astra DB](https://www.datastax.com/products/datastax-astra)
- [Pinecone](https://pinecone.io)
- [Chroma](https://trychroma.com)
- [Weaviate](https://weaviate.io)
- [Qdrant](https://qdrant.tech)
- [Milvus](https://milvus.io)
- [Zilliz](https://zilliz.com)
### Teknik Genel Bakış
Bu monorepo üç ana bölümden oluşmaktadır:
- **`frontend`**: ViteJS + React tabanlı bir ön yüz, LLM'in kullanabileceği tüm içeriği kolayca oluşturup yönetmenizi sağlar.
- **`server`**: NodeJS ve Express tabanlı bir sunucu, tüm etkileşimleri yönetir ve vektör veritabanı işlemleri ile LLM entegrasyonlarını gerçekleştirir.
- **`collector`**: Kullanıcı arayüzünden gelen belgeleri işleyen ve ayrıştıran NodeJS Express tabanlı bir sunucu.
- **`docker`**: Docker kurulum talimatları, derleme süreci ve kaynak koddan nasıl derleneceğine dair bilgiler içerir.
- **`embed`**: [Web gömme widget'ı](https://github.com/Mintplex-Labs/anythingllm-embed) oluşturma ve entegrasyonu için alt modül.
- **`browser-extension`**: [Chrome tarayıcı eklentisi](https://github.com/Mintplex-Labs/anythingllm-extension) için alt modül.
## 🛳 Kendi Sunucunuzda Barındırma
Mintplex Labs ve topluluk, AnythingLLM'i yerel olarak çalıştırmak için çeşitli dağıtım yöntemleri, betikler ve şablonlar sunmaktadır. Aşağıdaki tabloya göz atarak tercih ettiğiniz ortamda nasıl dağıtım yapabileceğinizi öğrenebilir veya otomatik dağıtım seçeneklerini keşfedebilirsiniz.
| Docker | AWS | GCP | Digital Ocean | Render.com |
|----------------------------------------|----|-----|---------------|------------|
| [![Deploy on Docker][docker-btn]][docker-deploy] | [![Deploy on AWS][aws-btn]][aws-deploy] | [![Deploy on GCP][gcp-btn]][gcp-deploy] | [![Deploy on DigitalOcean][do-btn]][do-deploy] | [![Deploy on Render.com][render-btn]][render-deploy] |
| Railway | RepoCloud | Elestio |
| --- | --- | --- |
| [![Deploy on Railway][railway-btn]][railway-deploy] | [![Deploy on RepoCloud][repocloud-btn]][repocloud-deploy] | [![Deploy on Elestio][elestio-btn]][elestio-deploy] |
[veya Docker kullanmadan üretim ortamında AnythingLLM kurun →](./BARE_METAL.md)
## Geliştirme İçin Kurulum
- `yarn setup` → Uygulamanın her bileşeni için gerekli `.env` dosyalarını oluşturur (reponun kök dizininden çalıştırılmalıdır).
- Devam etmeden önce bu dosyaları doldurun. **Özellikle `server/.env.development` dosyasının doldurulduğundan emin olun**, aksi takdirde sistem düzgün çalışmaz.
- `yarn dev:server` → Sunucuyu yerel olarak başlatır (reponun kök dizininden çalıştırılmalıdır).
- `yarn dev:frontend` → Ön yüzü yerel olarak çalıştırır (reponun kök dizininden çalıştırılmalıdır).
- `yarn dev:collector` → Belge toplayıcıyı çalıştırır (reponun kök dizininden çalıştırılmalıdır).
[Belgeler hakkında bilgi edinin](./server/storage/documents/DOCUMENTS.md)
[Vektör önbellekleme hakkında bilgi edinin](./server/storage/vector-cache/VECTOR_CACHE.md)
## Harici Uygulamalar ve Entegrasyonlar
_Bu uygulamalar Mintplex Labs tarafından yönetilmemektedir, ancak AnythingLLM ile uyumludur. Burada listelenmeleri bir onay anlamına gelmez._
- [Midori AI Alt Sistem Yöneticisi](https://io.midori-ai.xyz/subsystem/anythingllm/) - Docker konteyner teknolojisini kullanarak yapay zeka sistemlerini verimli bir şekilde dağıtmanın pratik bir yolu.
- [Coolify](https://coolify.io/docs/services/anythingllm/) - Tek tıklamayla AnythingLLM dağıtımı yapmanıza olanak tanır.
- [GPTLocalhost for Microsoft Word](https://gptlocalhost.com/demo/) - AnythingLLM'i Microsoft Word içinde kullanmanıza olanak tanıyan yerel bir Word eklentisi.
## Telemetri ve Gizlilik
Mintplex Labs Inc. tarafından geliştirilen AnythingLLM, anonim kullanım bilgilerini toplayan bir telemetri özelliği içermektedir.
<details>
<summary><kbd>AnythingLLM için Telemetri ve Gizlilik hakkında daha fazla bilgi</kbd></summary>
### Neden?
Bu bilgileri, AnythingLLM'in nasıl kullanıldığını anlamak, yeni özellikler ve hata düzeltmelerine öncelik vermek ve uygulamanın performansını ve kararlılığını iyileştirmek için kullanıyoruz.
### Telemetriden Çıkış Yapma (Opt-Out)
Sunucu veya Docker `.env` ayarlarında `DISABLE_TELEMETRY` değerini "true" olarak ayarlayarak telemetriyi devre dışı bırakabilirsiniz. Ayrıca, uygulama içinde **Kenar Çubuğu > Gizlilik** bölümüne giderek de bu özelliği kapatabilirsiniz.
### Hangi Verileri Açıkça Takip Ediyoruz?
Yalnızca ürün ve yol haritası kararlarını almamıza yardımcı olacak kullanım detaylarını takip ediyoruz:
- Kurulum türü (Docker veya Masaüstü)
- Bir belgenin eklenme veya kaldırılma olayı. **Belgenin içeriği hakkında hiçbir bilgi toplanmaz**, yalnızca olayın gerçekleştiği kaydedilir. Bu, kullanım sıklığını anlamamıza yardımcı olur.
- Kullanılan vektör veritabanı türü. Hangi sağlayıcının daha çok tercih edildiğini belirlemek için bu bilgiyi topluyoruz.
- Kullanılan LLM türü. En popüler modelleri belirleyerek bu sağlayıcılara öncelik verebilmemizi sağlar.
- Sohbet başlatılması. Bu en sık gerçekleşen "olay" olup, projenin günlük etkinliği hakkında genel bir fikir edinmemize yardımcı olur. **Yalnızca olay kaydedilir, sohbetin içeriği veya doğası hakkında hiçbir bilgi toplanmaz.**
Bu verileri doğrulamak için kod içinde **`Telemetry.sendTelemetry` çağrılarını** inceleyebilirsiniz. Ayrıca, bu olaylar günlük kaydına yazıldığı için hangi verilerin gönderildiğini görebilirsiniz (eğer etkinleştirilmişse). **IP adresi veya diğer tanımlayıcı bilgiler toplanmaz.** Telemetri sağlayıcısı, açık kaynaklı bir telemetri toplama hizmeti olan [PostHog](https://posthog.com/)'dur.
[Kaynak kodda tüm telemetri olaylarını görüntüle](https://github.com/search?q=repo%3AMintplex-Labs%2Fanything-llm%20.sendTelemetry\(&type=code)
</details>
## 👋 Katkıda Bulunma
- Bir **issue** oluşturun.
- `<issue numarası>-<kısa ad>` formatında bir **PR (Pull Request)** oluşturun.
- Çekirdek ekipten **LGTM (Looks Good To Me)** onayı alın.
## 🌟 Katkıda Bulunanlar
[![anythingllm contributors](https://contrib.rocks/image?repo=mintplex-labs/anything-llm)](https://github.com/mintplex-labs/anything-llm/graphs/contributors)
[![Star History Chart](https://api.star-history.com/svg?repos=mintplex-labs/anything-llm&type=Timeline)](https://star-history.com/#mintplex-labs/anything-llm&Date)
## 🔗 Diğer Ürünler
- **[VectorAdmin][vector-admin]:** Vektör veritabanlarını yönetmek için hepsi bir arada GUI ve araç paketi.
- **[OpenAI Assistant Swarm][assistant-swarm]:** Tüm OpenAI asistanlarınızı tek bir ajan tarafından yönetilen bir yapay zeka ordusuna dönüştürün.
<div align="right">
[![][back-to-top]](#readme-top)
</div>
---
Telif Hakkı © 2025 [Mintplex Labs][profile-link]. <br />
Bu proje [MIT](./LICENSE) lisansı ile lisanslanmıştır.
<!-- LINK GROUP -->
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-222628?style=flat-square
[profile-link]: https://github.com/mintplex-labs
[vector-admin]: https://github.com/mintplex-labs/vector-admin
[assistant-swarm]: https://github.com/Mintplex-Labs/openai-assistant-swarm
[docker-btn]: ./images/deployBtns/docker.png
[docker-deploy]: ./docker/HOW_TO_USE_DOCKER.md
[aws-btn]: ./images/deployBtns/aws.png
[aws-deploy]: ./cloud-deployments/aws/cloudformation/DEPLOY.md
[gcp-btn]: https://deploy.cloud.run/button.svg
[gcp-deploy]: ./cloud-deployments/gcp/deployment/DEPLOY.md
[do-btn]: https://www.deploytodo.com/do-btn-blue.svg
[do-deploy]: ./cloud-deployments/digitalocean/terraform/DEPLOY.md
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[railway-btn]: https://railway.app/button.svg
[railway-deploy]: https://railway.app/template/HNSCS1?referralCode=WFgJkn
[repocloud-btn]: https://d16t0pc4846x52.cloudfront.net/deploylobe.svg
[repocloud-deploy]: https://repocloud.io/details/?app_id=276
[elestio-btn]: https://elest.io/images/logos/deploy-to-elestio-btn.png
[elestio-deploy]: https://elest.io/open-source/anythingllm

View file

@ -51,7 +51,7 @@ const DocumentSyncQueue = {
if (chunkSource.startsWith("youtube://")) return true; // If is a youtube link
if (chunkSource.startsWith("confluence://")) return true; // If is a confluence document link
if (chunkSource.startsWith("github://")) return true; // If is a GitHub file reference
if (chunkSource.startsWith("gitlab://")) return true; // If is a Gitlab file reference
if (chunkSource.startsWith("gitlab://")) return true; // If is a GitLab file reference
return false;
},

View file

@ -43,6 +43,7 @@
"body-parser": "^1.20.2",
"chalk": "^4",
"check-disk-space": "^3.4.0",
"cheerio": "^1.0.0",
"chromadb": "^1.5.2",
"cohere-ai": "^7.9.5",
"cors": "^2.8.5",

View file

@ -7,4 +7,4 @@ novita
mixedbread-ai*
gemini
togetherAi
ocr
tesseract

View file

@ -10,15 +10,22 @@ const { summarizeContent } = require("../../agents/aibitat/utils/summarize");
* @returns {Promise<string>} Scraped content
*/
async function executeWebScraping(config, context) {
const { url } = config;
const { url, captureAs = "text" } = config;
const { introspect, model, provider } = context;
if (!url) {
throw new Error("URL is required for web scraping");
}
introspect(`Scraping the content of ${url}`);
const { success, content } = await new CollectorApi().getLinkContent(url);
// Remap the captureAs to the correct mode for the CollectorApi
const captureMode = captureAs === "querySelector" ? "html" : captureAs;
introspect(`Scraping the content of ${url} as ${captureAs}`);
const { success, content } = await new CollectorApi()
.getLinkContent(url, captureMode)
.then((res) => {
if (captureAs !== "querySelector") return res;
return parseHTMLwithSelector(res.content, config.querySelector, context);
});
if (!success) {
introspect(`Could not scrape ${url}. Cannot use this page's content.`);
@ -52,4 +59,38 @@ async function executeWebScraping(config, context) {
return summary;
}
/**
* Parse HTML with a CSS selector
* @param {string} html - The HTML to parse
* @param {string|null} selector - The CSS selector to use (as text string)
* @param {{introspect: Function}} context - The context object
* @returns {Object} The parsed content
*/
function parseHTMLwithSelector(html, selector = null, context) {
if (!selector || selector.length === 0) {
context.introspect("No selector provided. Returning the entire HTML.");
return { success: true, content: html };
}
const Cheerio = require("cheerio");
const $ = Cheerio.load(html);
const selectedElements = $(selector);
let content;
if (selectedElements.length === 0) {
return { success: false, content: null };
} else if (selectedElements.length === 1) {
content = selectedElements.html();
} else {
context.introspect(
`Found ${selectedElements.length} elements matching selector: ${selector}`
);
content = selectedElements
.map((_, element) => $(element).html())
.get()
.join("\n");
}
return { success: true, content };
}
module.exports = executeWebScraping;
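
A standalone illustration of the selector behavior implemented in parseHTMLwithSelector above: a single match returns its inner HTML, multiple matches are joined with newlines.

const Cheerio = require("cheerio");

const html =
  '<div class="post"><p>First</p></div><div class="post"><p>Second</p></div>';
const $ = Cheerio.load(html);
const matches = $(".post");
// 2 matches -> inner HTML of each, joined with "\n"
const content = matches.map((_, el) => $(el).html()).get().join("\n");
console.log(content); // "<p>First</p>\n<p>Second</p>"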

View file

@ -148,10 +148,10 @@ class CollectorApi {
});
}
async getLinkContent(link = "") {
async getLinkContent(link = "", captureAs = "text") {
if (!link) return false;
const data = JSON.stringify({ link });
const data = JSON.stringify({ link, captureAs });
return await fetch(`${this.endpoint}/util/get-link`, {
method: "POST",
headers: {
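
A server-side call sketch for the updated method (illustrative; the require path and export shape are assumptions):

// Ask the collector for raw HTML instead of extracted text.
const { CollectorApi } = require("./utils/collectorApi"); // export shape assumed

async function fetchRawHtml(url) {
  const { success, content } = await new CollectorApi().getLinkContent(url, "html");
  return success ? content : null;
}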

View file

@ -2490,6 +2490,35 @@ check-disk-space@^3.4.0:
resolved "https://registry.yarnpkg.com/check-disk-space/-/check-disk-space-3.4.0.tgz#eb8e69eee7a378fd12e35281b8123a8b4c4a8ff7"
integrity sha512-drVkSqfwA+TvuEhFipiR1OC9boEGZL5RrWvVsOthdcvQNXyCCuKkEiTOTXZ7qxSf/GLwq4GvzfrQD/Wz325hgw==
cheerio-select@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4"
integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==
dependencies:
boolbase "^1.0.0"
css-select "^5.1.0"
css-what "^6.1.0"
domelementtype "^2.3.0"
domhandler "^5.0.3"
domutils "^3.0.1"
cheerio@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0.tgz#1ede4895a82f26e8af71009f961a9b8cb60d6a81"
integrity sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==
dependencies:
cheerio-select "^2.1.0"
dom-serializer "^2.0.0"
domhandler "^5.0.3"
domutils "^3.1.0"
encoding-sniffer "^0.2.0"
htmlparser2 "^9.1.0"
parse5 "^7.1.2"
parse5-htmlparser2-tree-adapter "^7.0.0"
parse5-parser-stream "^7.1.2"
undici "^6.19.5"
whatwg-mimetype "^4.0.0"
chokidar@^3.5.2:
version "3.6.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b"
@ -2963,6 +2992,15 @@ domutils@^3.0.1:
domelementtype "^2.3.0"
domhandler "^5.0.3"
domutils@^3.1.0:
version "3.2.2"
resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.2.2.tgz#edbfe2b668b0c1d97c24baf0f1062b132221bc78"
integrity sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==
dependencies:
dom-serializer "^2.0.0"
domelementtype "^2.3.0"
domhandler "^5.0.3"
dotenv@^16.0.3:
version "16.4.5"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f"
@ -3012,6 +3050,14 @@ encodeurl@~1.0.2:
resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==
encoding-sniffer@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/encoding-sniffer/-/encoding-sniffer-0.2.0.tgz#799569d66d443babe82af18c9f403498365ef1d5"
integrity sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg==
dependencies:
iconv-lite "^0.6.3"
whatwg-encoding "^3.1.1"
encoding@^0.1.13:
version "0.1.13"
resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9"
@ -3026,7 +3072,7 @@ end-of-stream@^1.1.0, end-of-stream@^1.4.1:
dependencies:
once "^1.4.0"
entities@^4.2.0:
entities@^4.2.0, entities@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
@ -3882,6 +3928,16 @@ hermes-parser@0.20.1:
dependencies:
hermes-estree "0.20.1"
htmlparser2@^9.1.0:
version "9.1.0"
resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-9.1.0.tgz#cdb498d8a75a51f739b61d3f718136c369bc8c23"
integrity sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==
dependencies:
domelementtype "^2.3.0"
domhandler "^5.0.3"
domutils "^3.1.0"
entities "^4.5.0"
http-errors@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
@ -3943,7 +3999,7 @@ iconv-lite@0.4.24, iconv-lite@^0.4.24:
dependencies:
safer-buffer ">= 2.1.2 < 3"
iconv-lite@^0.6.2, iconv-lite@^0.6.3:
iconv-lite@0.6.3, iconv-lite@^0.6.2, iconv-lite@^0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501"
integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==
@ -5342,6 +5398,28 @@ parent-module@^1.0.0:
dependencies:
callsites "^3.0.0"
parse5-htmlparser2-tree-adapter@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.1.0.tgz#b5a806548ed893a43e24ccb42fbb78069311e81b"
integrity sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==
dependencies:
domhandler "^5.0.3"
parse5 "^7.0.0"
parse5-parser-stream@^7.1.2:
version "7.1.2"
resolved "https://registry.yarnpkg.com/parse5-parser-stream/-/parse5-parser-stream-7.1.2.tgz#d7c20eadc37968d272e2c02660fff92dd27e60e1"
integrity sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==
dependencies:
parse5 "^7.0.0"
parse5@^7.0.0, parse5@^7.1.2:
version "7.2.1"
resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.2.1.tgz#8928f55915e6125f430cc44309765bf17556a33a"
integrity sha512-BuBYQYlv1ckiPdQi/ohiivi9Sagc9JG+Ozs0r7b/0iK3sKmrb0b9FdWdBbOdx6hBCM/F9Ir82ofnBhtZOjCRPQ==
dependencies:
entities "^4.5.0"
parseurl@~1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
@ -6482,6 +6560,11 @@ undici-types@~5.26.4:
resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617"
integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
undici@^6.19.5:
version "6.21.1"
resolved "https://registry.yarnpkg.com/undici/-/undici-6.21.1.tgz#336025a14162e6837e44ad7b819b35b6c6af0e05"
integrity sha512-q/1rj5D0/zayJB2FraXdaWxbhWiNKDvu8naDT2dl1yTlvJp4BLtOcp2a5BvgGNQpYYJzau7tf1WgKv3b+7mqpQ==
undici@~5.28.4:
version "5.28.4"
resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068"
@ -6580,11 +6663,23 @@ webidl-conversions@^3.0.0:
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
whatwg-encoding@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz#d0f4ef769905d426e1688f3e34381a99b60b76e5"
integrity sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==
dependencies:
iconv-lite "0.6.3"
whatwg-fetch@^3.4.1, whatwg-fetch@^3.6.20:
version "3.6.20"
resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz#580ce6d791facec91d37c72890995a0b48d31c70"
integrity sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==
whatwg-mimetype@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz#bc1bf94a985dc50388d54a9258ac405c3ca2fc0a"
integrity sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==
whatwg-url@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"