Bump perplexity models ()

* bump perplexity models

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
Authored by Sean Hatfield on 2025-01-24 08:35:38 +08:00; committed by GitHub
parent 9584a7e140
commit 57f4f46a39
3 changed files with 17 additions and 42 deletions
server/utils/AiProviders/perplexity


@@ -1,39 +1,14 @@
 const MODELS = {
-  "llama-3.1-sonar-small-128k-online": {
-    id: "llama-3.1-sonar-small-128k-online",
-    name: "llama-3.1-sonar-small-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-large-128k-online": {
-    id: "llama-3.1-sonar-large-128k-online",
-    name: "llama-3.1-sonar-large-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-huge-128k-online": {
-    id: "llama-3.1-sonar-huge-128k-online",
-    name: "llama-3.1-sonar-huge-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-small-128k-chat": {
-    id: "llama-3.1-sonar-small-128k-chat",
-    name: "llama-3.1-sonar-small-128k-chat",
-    maxLength: 131072,
-  },
-  "llama-3.1-sonar-large-128k-chat": {
-    id: "llama-3.1-sonar-large-128k-chat",
-    name: "llama-3.1-sonar-large-128k-chat",
-    maxLength: 131072,
-  },
-  "llama-3.1-8b-instruct": {
-    id: "llama-3.1-8b-instruct",
-    name: "llama-3.1-8b-instruct",
-    maxLength: 131072,
-  },
-  "llama-3.1-70b-instruct": {
-    id: "llama-3.1-70b-instruct",
-    name: "llama-3.1-70b-instruct",
-    maxLength: 131072,
+  "sonar-pro": {
+    "id": "sonar-pro",
+    "name": "sonar-pro",
+    "maxLength": 200000
   },
+  "sonar": {
+    "id": "sonar",
+    "name": "sonar",
+    "maxLength": 127072
+  }
 };
 module.exports.MODELS = MODELS;
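For reference, the exported map is keyed by model id, so a caller can look up the context window directly; the `contextWindowFor` helper below is a hypothetical sketch for illustration, not code from this commit.

// Hypothetical consumer of the MODELS export above (not part of this commit).
const { MODELS } = require("./models");

// Returns the max context length for a Perplexity model id, with a
// conservative fallback for ids that are not in the map.
function contextWindowFor(modelId, fallback = 4096) {
  return MODELS[modelId]?.maxLength ?? fallback;
}

console.log(contextWindowFor("sonar-pro")); // 200000
console.log(contextWindowFor("sonar")); // 127072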


@@ -1,9 +1,4 @@
 | Model                                | Parameter Count | Context Length | Model Type      |
 | :---------------------------------- | :-------------- | :------------- | :-------------- |
-| `llama-3.1-sonar-small-128k-online`  | 8B              | 127,072        | Chat Completion |
-| `llama-3.1-sonar-large-128k-online`  | 70B             | 127,072        | Chat Completion |
-| `llama-3.1-sonar-huge-128k-online`   | 405B            | 127,072        | Chat Completion |
-| `llama-3.1-sonar-small-128k-chat`    | 8B              | 131,072        | Chat Completion |
-| `llama-3.1-sonar-large-128k-chat`    | 70B             | 131,072        | Chat Completion |
-| `llama-3.1-8b-instruct`              | 8B              | 131,072        | Chat Completion |
-| `llama-3.1-70b-instruct`             | 70B             | 131,072        | Chat Completion |
+| `sonar-pro`                          | 8B              | 200,000        | Chat Completion |
+| `sonar`                              | 8B              | 127,072        | Chat Completion |


@@ -8,7 +8,12 @@
 // copy outputs into the export in ../models.js
 // Update the date below if you run this again because Perplexity added new models.
-// Last Collected: Sept 12, 2024
+// Last Collected: Jan 23, 2025
+// UPDATE: Jan 23, 2025
+// The model table is no longer available on Perplexity's website, and the old models
+// have been deprecated, so we now update the chat_models.txt file with the new models
+// by hand and then run this script to regenerate the list.
 import fs from "fs";
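The workflow described in the new comment (hand-edit chat_models.txt, then rerun the script) could look roughly like the sketch below. It assumes chat_models.txt contains a markdown table shaped like the one diffed above; the actual parse script in the repo may differ.

// Rough sketch of regenerating the MODELS map from chat_models.txt (assumed format).
import fs from "fs";

const rows = fs
  .readFileSync("chat_models.txt", "utf8")
  .split("\n")
  .filter((line) => line.startsWith("| `")); // keep only the model data rows

const MODELS = {};
for (const row of rows) {
  const cols = row.split("|").map((col) => col.trim());
  const id = cols[1].replace(/`/g, ""); // e.g. "sonar-pro"
  const maxLength = Number(cols[3].replace(/,/g, "")); // e.g. 200000
  MODELS[id] = { id, name: id, maxLength };
}

// Paste this output into the MODELS export in ../models.js.
console.log(JSON.stringify(MODELS, null, 2));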