anything-llm/server/utils/AiProviders/perplexity/models.js
Commit 5df6b5f7d9 by Timothy Carambat, 2024-07-19 15:11:10 -07:00
Bump perplexity models ()

* Added Supported Models Free Tier - chat_models.txt

  Need to fill in correct Parameter Count.

* Bump perplexity model
  closes 
  closes 

---------

Co-authored-by: Tim-Hoekstra <135951177+Tim-Hoekstra@users.noreply.github.com>


// Perplexity chat-completion models exposed by this provider. The keys double
// as the model ids sent to Perplexity's API; maxLength is the model's context
// window in tokens (the "online" search-grounded variants advertise 28,000,
// the offline chat variants 32,768).
const MODELS = {
  "llama-3-sonar-small-32k-online": {
    id: "llama-3-sonar-small-32k-online",
    name: "llama-3-sonar-small-32k-online",
    maxLength: 28000,
  },
  "llama-3-sonar-small-32k-chat": {
    id: "llama-3-sonar-small-32k-chat",
    name: "llama-3-sonar-small-32k-chat",
    maxLength: 32768,
  },
  "llama-3-sonar-large-32k-online": {
    id: "llama-3-sonar-large-32k-online",
    name: "llama-3-sonar-large-32k-online",
    maxLength: 28000,
  },
  "llama-3-sonar-large-32k-chat": {
    id: "llama-3-sonar-large-32k-chat",
    name: "llama-3-sonar-large-32k-chat",
    maxLength: 32768,
  },
  "llama-3-8b-instruct": {
    id: "llama-3-8b-instruct",
    name: "llama-3-8b-instruct",
    maxLength: 8192,
  },
  "llama-3-70b-instruct": {
    id: "llama-3-70b-instruct",
    name: "llama-3-70b-instruct",
    maxLength: 8192,
  },
  "mixtral-8x7b-instruct": {
    id: "mixtral-8x7b-instruct",
    name: "mixtral-8x7b-instruct",
    maxLength: 16384,
  },
};
module.exports.MODELS = MODELS;
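
As a quick illustration of how this map can be consumed elsewhere in the provider, the sketch below resolves a model's context window from its id. The helper name and the fallback value are hypothetical and are not part of this file or of AnythingLLM's actual provider code.

// Hypothetical usage sketch: look up a Perplexity model's context window,
// falling back to a conservative default for ids missing from MODELS.
const { MODELS } = require("./models");

function contextWindowFor(modelId, fallback = 4096) {
  // Keys in MODELS are the exact model ids, so a plain property access works.
  return MODELS[modelId]?.maxLength ?? fallback;
}

console.log(contextWindowFor("llama-3-sonar-small-32k-chat")); // 32768
console.log(contextWindowFor("unknown-model")); // 4096 (fallback)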