Update Perplexity models in model_prices_and_context_window.json

According to https://docs.perplexity.ai/docs/model-cards
This commit is contained in:
Toni Engelhardt 2024-02-05 16:46:10 +00:00 committed by GitHub
parent 109ccf4cef
commit e832492423
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -1769,71 +1769,71 @@
         "litellm_provider": "deepinfra",
         "mode": "chat"
     },
-    "perplexity/pplx-7b-chat": {
+    "perplexity/codellama-34b-instruct": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000035,
+        "output_cost_per_token": 0.00000140,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/codellama-70b-instruct": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000070,
+        "output_cost_per_token": 0.00000280,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/pplx-7b-chat": {
         "max_tokens": 8192,
         "input_cost_per_token": 0.00000007,
         "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
-    "perplexity/pplx-34b-chat": {
-        "max_tokens": 8192,
-        "input_cost_per_token": 0.00000035,
-        "output_cost_per_token": 0.00000140,
-        "litellm_provider": "perplexity",
-        "mode": "chat"
-    },
-    "perplexity/pplx-70b-chat": {
+    "perplexity/pplx-70b-chat": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.00000070,
         "output_cost_per_token": 0.00000280,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
     "perplexity/pplx-7b-online": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.0000000,
         "output_cost_per_token": 0.00000028,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
     "perplexity/pplx-70b-online": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.0000000,
         "output_cost_per_token": 0.00000280,
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
-    "perplexity/llama-2-13b-chat": {
+    "perplexity/llama-2-70b-chat": {
         "max_tokens": 4096,
-        "input_cost_per_token": 0.0000000,
-        "output_cost_per_token": 0.000000,
+        "input_cost_per_token": 0.00000070,
+        "output_cost_per_token": 0.00000280,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
-    "perplexity/llama-2-70b-chat": {
+    "perplexity/mistral-7b-instruct": {
         "max_tokens": 4096,
-        "input_cost_per_token": 0.0000000,
-        "output_cost_per_token": 0.000000,
+        "input_cost_per_token": 0.00000007,
+        "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
-    "perplexity/mistral-7b-instruct": {
+    "perplexity/mixtral-8x7b-instruct": {
         "max_tokens": 4096,
-        "input_cost_per_token": 0.0000000,
-        "output_cost_per_token": 0.000000,
+        "input_cost_per_token": 0.00000007,
+        "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
-    "perplexity/replit-code-v1.5-3b": {
-        "max_tokens": 4096,
-        "input_cost_per_token": 0.0000000,
-        "output_cost_per_token": 0.000000,
-        "litellm_provider": "perplexity",
-        "mode": "chat"
-    },
     "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
         "max_tokens": 16384,
         "input_cost_per_token": 0.00000015,