From 51ffbbb3fafc735b7b6b98d4d755d1b02470286c Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Tue, 6 Aug 2024 11:39:20 -0700
Subject: [PATCH] fix pricing

---
 litellm/model_prices_and_context_window_backup.json | 2 +-
 model_prices_and_context_window.json                 | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 392add416..98b0045ae 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -73,7 +73,7 @@
         "max_tokens": 4096,
         "max_input_tokens": 128000,
         "max_output_tokens": 4096,
-        "input_cost_per_token": 0.00000025,
+        "input_cost_per_token": 0.0000025,
         "output_cost_per_token": 0.000010,
         "litellm_provider": "openai",
         "mode": "chat",
diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 392add416..98b0045ae 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -73,7 +73,7 @@
         "max_tokens": 4096,
         "max_input_tokens": 128000,
         "max_output_tokens": 4096,
-        "input_cost_per_token": 0.00000025,
+        "input_cost_per_token": 0.0000025,
         "output_cost_per_token": 0.000010,
         "litellm_provider": "openai",
         "mode": "chat",
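
Note (not part of the patch): the fix raises "input_cost_per_token" by a factor of 10, from 0.00000025 USD/token ($0.25 per 1M input tokens) to 0.0000025 USD/token ($2.50 per 1M input tokens), while the output rate stays at 0.000010 USD/token ($10.00 per 1M output tokens). The Python sketch below only illustrates that arithmetic; the estimate_cost helper is hypothetical and is not a litellm API.

    # Illustration of the pricing change above; estimate_cost is a
    # hypothetical helper that multiplies token counts by the per-token
    # USD rates from the JSON entry.

    OLD_INPUT_COST_PER_TOKEN = 0.00000025  # before the fix ($0.25 / 1M tokens)
    NEW_INPUT_COST_PER_TOKEN = 0.0000025   # after the fix  ($2.50 / 1M tokens)
    OUTPUT_COST_PER_TOKEN = 0.000010       # unchanged      ($10.00 / 1M tokens)


    def estimate_cost(prompt_tokens: int, completion_tokens: int,
                      input_cost_per_token: float,
                      output_cost_per_token: float = OUTPUT_COST_PER_TOKEN) -> float:
        """Estimated USD cost for one request at the given per-token rates."""
        return (prompt_tokens * input_cost_per_token
                + completion_tokens * output_cost_per_token)


    if __name__ == "__main__":
        # Example: a 100k-token prompt with a 1k-token completion.
        before = estimate_cost(100_000, 1_000, OLD_INPUT_COST_PER_TOKEN)
        after = estimate_cost(100_000, 1_000, NEW_INPUT_COST_PER_TOKEN)
        print(f"before fix: ${before:.4f}")  # $0.0350
        print(f"after fix:  ${after:.4f}")   # $0.2600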