Update prices/context windows for Perplexity Llama 3.1 models

This commit is contained in:
Aaron Bach 2024-08-14 13:20:22 -06:00
parent 066ed20eb0
commit 9d42dfb417

View file

@@ -4531,6 +4531,69 @@
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-70b-instruct": {
"max_tokens": 131072,
"max_input_tokens": 131072,
"max_output_tokens": 131072,
"input_cost_per_token": 0.000001,
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-8b-instruct": {
"max_tokens": 131072,
"max_input_tokens": 131072,
"max_output_tokens": 131072,
"input_cost_per_token": 0.0000002,
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-sonar-huge-128k-online": {
"max_tokens": 127072,
"max_input_tokens": 127072,
"max_output_tokens": 127072,
"input_cost_per_token": 0.000005,
"output_cost_per_token": 0.000005,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-sonar-large-128k-online": {
"max_tokens": 127072,
"max_input_tokens": 127072,
"max_output_tokens": 127072,
"input_cost_per_token": 0.000001,
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-sonar-large-128k-chat": {
"max_tokens": 131072,
"max_input_tokens": 131072,
"max_output_tokens": 131072,
"input_cost_per_token": 0.000001,
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-sonar-small-128k-chat": {
"max_tokens": 131072,
"max_input_tokens": 131072,
"max_output_tokens": 131072,
"input_cost_per_token": 0.0000002,
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/llama-3.1-sonar-small-128k-online": {
"max_tokens": 127072,
"max_input_tokens": 127072,
"max_output_tokens": 127072,
"input_cost_per_token": 0.0000002,
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat"
},
"perplexity/pplx-7b-chat": {
"max_tokens": 8192,
"max_input_tokens": 8192,