diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 428d95589..667745c30 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -893,11 +893,11 @@
         "mode": "chat"
     },
     "mistral/mistral-large-latest": {
-        "max_tokens": 8191,
-        "max_input_tokens": 32000,
-        "max_output_tokens": 8191,
-        "input_cost_per_token": 0.000004,
-        "output_cost_per_token": 0.000012,
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000009,
         "litellm_provider": "mistral",
         "mode": "chat",
         "supports_function_calling": true
@@ -912,6 +912,16 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "mistral/mistral-large-2407": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000009,
+        "litellm_provider": "mistral",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "mistral/open-mistral-7b": {
         "max_tokens": 8191,
         "max_input_tokens": 32000,
diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 428d95589..667745c30 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -893,11 +893,11 @@
         "mode": "chat"
     },
     "mistral/mistral-large-latest": {
-        "max_tokens": 8191,
-        "max_input_tokens": 32000,
-        "max_output_tokens": 8191,
-        "input_cost_per_token": 0.000004,
-        "output_cost_per_token": 0.000012,
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000009,
         "litellm_provider": "mistral",
         "mode": "chat",
         "supports_function_calling": true
@@ -912,6 +912,16 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "mistral/mistral-large-2407": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000009,
+        "litellm_provider": "mistral",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "mistral/open-mistral-7b": {
         "max_tokens": 8191,
         "max_input_tokens": 32000,
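
Not part of the patch, but a quick way to sanity-check the new `mistral/mistral-large-2407` entry once the updated JSON ships: the sketch below assumes litellm's public `get_max_tokens` and `cost_per_token` helpers, which read from this pricing map; the specific token counts are illustrative.

```python
import litellm

model = "mistral/mistral-large-2407"

# Context window for the new entry should report 128000 tokens.
print(litellm.get_max_tokens(model))

# Cost for an illustrative 1,000-prompt-token / 500-completion-token call:
# 1000 * 0.000003 = 0.003 and 500 * 0.000009 = 0.0045 (up to float rounding).
prompt_cost, completion_cost = litellm.cost_per_token(
    model=model,
    prompt_tokens=1000,
    completion_tokens=500,
)
print(prompt_cost, completion_cost)
```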