From 08fd41d43d6509b1f482a9360dbf51f2215f5afd Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 29 Nov 2023 21:15:34 -0800
Subject: [PATCH] (feat) add pplx-online llms

---
 model_prices_and_context_window.json | 23 ++++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index ae4e49d45..220eaedcd 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -818,13 +818,34 @@
         "litellm_provider": "deepinfra",
         "mode": "chat"
     },
-    "perplexity/codellama-34b-instruct": {
+    "perplexity/pplx-7b-chat": {
+        "max_tokens": 8192,
+        "input_cost_per_token": 0.0000000,
+        "output_cost_per_token": 0.000000,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/pplx-70b-chat": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.0000000,
         "output_cost_per_token": 0.000000,
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
+    "perplexity/pplx-7b-online": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.0000000,
+        "output_cost_per_token": 0.0005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/pplx-70b-online": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.0000000,
+        "output_cost_per_token": 0.0005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
     "perplexity/llama-2-13b-chat": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.0000000,