Mirror of https://github.com/Mintplex-Labs/anything-llm.git, synced 2025-05-02 17:07:13 +00:00
Bump perplexity models (#1905)
* Added Supported Models Free Tier - chat_models.txt
  Need to fill in correct Parameter Count.
* Bump perplexity model

closes #1901
closes #1900

---------

Co-authored-by: Tim-Hoekstra <135951177+Tim-Hoekstra@users.noreply.github.com>
parent 61abb37603
commit 5df6b5f7d9
3 changed files with 27 additions and 45 deletions
server/utils/AiProviders/perplexity

@@ -1,23 +1,23 @@
 const MODELS = {
-  "sonar-small-chat": {
-    id: "sonar-small-chat",
-    name: "sonar-small-chat",
-    maxLength: 16384,
+  "llama-3-sonar-small-32k-online\\*": {
+    id: "llama-3-sonar-small-32k-online\\*",
+    name: "llama-3-sonar-small-32k-online\\*",
+    maxLength: 28000,
   },
-  "sonar-small-online": {
-    id: "sonar-small-online",
-    name: "sonar-small-online",
-    maxLength: 12000,
+  "llama-3-sonar-small-32k-chat": {
+    id: "llama-3-sonar-small-32k-chat",
+    name: "llama-3-sonar-small-32k-chat",
+    maxLength: 32768,
   },
-  "sonar-medium-chat": {
-    id: "sonar-medium-chat",
-    name: "sonar-medium-chat",
-    maxLength: 16384,
+  "llama-3-sonar-large-32k-online\\*": {
+    id: "llama-3-sonar-large-32k-online\\*",
+    name: "llama-3-sonar-large-32k-online\\*",
+    maxLength: 28000,
   },
-  "sonar-medium-online": {
-    id: "sonar-medium-online",
-    name: "sonar-medium-online",
-    maxLength: 12000,
+  "llama-3-sonar-large-32k-chat": {
+    id: "llama-3-sonar-large-32k-chat",
+    name: "llama-3-sonar-large-32k-chat",
+    maxLength: 32768,
   },
   "llama-3-8b-instruct": {
     id: "llama-3-8b-instruct",
@@ -29,26 +29,11 @@ const MODELS = {
     name: "llama-3-70b-instruct",
     maxLength: 8192,
   },
-  "codellama-70b-instruct": {
-    id: "codellama-70b-instruct",
-    name: "codellama-70b-instruct",
-    maxLength: 16384,
-  },
-  "mistral-7b-instruct": {
-    id: "mistral-7b-instruct",
-    name: "mistral-7b-instruct",
-    maxLength: 16384,
-  },
   "mixtral-8x7b-instruct": {
     id: "mixtral-8x7b-instruct",
     name: "mixtral-8x7b-instruct",
     maxLength: 16384,
   },
-  "mixtral-8x22b-instruct": {
-    id: "mixtral-8x22b-instruct",
-    name: "mixtral-8x22b-instruct",
-    maxLength: 16384,
-  },
 };
 
 module.exports.MODELS = MODELS;
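The MODELS map above is keyed by the Perplexity model id, and downstream code reads maxLength from it to size a model's context window. A minimal sketch of that lookup, assuming a hypothetical getMaxLength helper and a 4096-token fallback (neither is part of this commit):

```js
// Illustrative only: getMaxLength and the 4096 fallback are assumptions,
// not code from this commit. MODELS maps model id -> { id, name, maxLength }.
const { MODELS } = require("./models");

function getMaxLength(modelId) {
  // Unknown ids fall back to a conservative default context window.
  return MODELS[modelId]?.maxLength ?? 4096;
}

getMaxLength("llama-3-sonar-large-32k-chat"); // 32768
getMaxLength("not-a-real-model"); // 4096
```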
@@ -1,12 +1,9 @@
-| Model                     | Parameter Count | Context Length | Model Type      |
-| :------------------------ | :-------------- | :------------- | :-------------- |
-| `sonar-small-chat`        | 7B              | 16384          | Chat Completion |
-| `sonar-small-online`      | 7B              | 12000          | Chat Completion |
-| `sonar-medium-chat`       | 8x7B            | 16384          | Chat Completion |
-| `sonar-medium-online`     | 8x7B            | 12000          | Chat Completion |
-| `llama-3-8b-instruct`     | 8B              | 8192           | Chat Completion |
-| `llama-3-70b-instruct`    | 70B             | 8192           | Chat Completion |
-| `codellama-70b-instruct`  | 70B             | 16384          | Chat Completion |
-| `mistral-7b-instruct` [1] | 7B              | 16384          | Chat Completion |
-| `mixtral-8x7b-instruct`   | 8x7B            | 16384          | Chat Completion |
-| `mixtral-8x22b-instruct`  | 8x22B           | 16384          | Chat Completion |
+| Model                              | Parameter Count | Context Length | Model Type      |
+| :--------------------------------- | :-------------- | :------------- | :-------------- |
+| `llama-3-sonar-small-32k-online`\* | 8B              | 28,000         | Chat Completion |
+| `llama-3-sonar-small-32k-chat`     | 8B              | 32,768         | Chat Completion |
+| `llama-3-sonar-large-32k-online`\* | 70B             | 28,000         | Chat Completion |
+| `llama-3-sonar-large-32k-chat`     | 70B             | 32,768         | Chat Completion |
+| `llama-3-8b-instruct`              | 8B              | 8,192          | Chat Completion |
+| `llama-3-70b-instruct`             | 70B             | 8,192          | Chat Completion |
+| `mixtral-8x7b-instruct`            | 8x7B            | 16,384         | Chat Completion |
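Each row of the supported-models table (chat_models.txt, per the commit message) maps one-to-one onto an entry in the MODELS export: the backticked name becomes the key, id, and name, and the Context Length cell becomes maxLength. A rough sketch of that mapping for a single row follows; the row literal and the split("|") call are illustrative assumptions chosen to line up with the .slice(1, -1) visible in the script hunk below:

```js
// Hypothetical walkthrough of one table row -> one MODELS entry.
// The row literal and the split("|") step are assumptions for illustration.
const row =
  "| `llama-3-sonar-small-32k-chat` | 8B | 32,768 | Chat Completion |";

let [model, _params, contextLength] = row
  .split("|")
  .slice(1, -1)
  .map((text) => text.trim());

model = model.replace(/`|\s*\[\d+\]\s*/g, ""); // strip backticks and footnote markers like [1]
const maxLength = Number(contextLength.replace(/[^\d]/g, "")); // "32,768" -> 32768

const entry = { [model]: { id: model, name: model, maxLength } };
// { "llama-3-sonar-small-32k-chat": { id: "...", name: "...", maxLength: 32768 } }
```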
@@ -8,7 +8,7 @@
 // copy outputs into the export in ../models.js
 
 // Update the date below if you run this again because Perplexity added new models.
-// Last Collected: Apr 25, 2024
+// Last Collected: Jul 19, 2024
 
 import fs from "fs";
@@ -23,7 +23,7 @@ function parseChatModels() {
       .slice(1, -1)
       .map((text) => text.trim());
     model = model.replace(/`|\s*\[\d+\]\s*/g, "");
-    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
+    const maxLength = Number(contextLength.replace(/[^\d]/g, ""));
     if (model && maxLength) {
       models[model] = {
         id: model,
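The regex swap in the last hunk is what keeps the collector script working against the new table: the old pattern only stripped footnote markers such as [1], but the new context lengths are written with thousands separators ("28,000"), and Number("28,000") is NaN. A small demonstration (the raw value is just for illustration):

```js
// Old vs. new cleanup of a Context Length cell; `raw` is illustrative.
const raw = "28,000";

Number(raw.replace(/\s*\[\d+\]\s*/g, "")); // NaN, the old pattern leaves the comma in place
Number(raw.replace(/[^\d]/g, "")); // 28000, the new pattern keeps digits only
```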