diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 156717f076..ca75e37088 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -6606,6 +6606,17 @@
         "litellm_provider": "deepinfra",
         "mode": "chat"
     },
+    "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": {
+        "max_tokens": 32768,
+        "max_input_tokens": 32768,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.0000009,
+        "output_cost_per_token": 0.0000009,
+        "litellm_provider": "deepinfra",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true
+    },
     "deepinfra/01-ai/Yi-34B-200K": {
         "max_tokens": 4096,
         "max_input_tokens": 200000,
diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 8079a9bc45..ca75e37088 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -6615,7 +6615,7 @@
         "litellm_provider": "deepinfra",
         "mode": "chat",
         "supports_function_calling": true,
-        "supports_parallel_function_calling": true,
+        "supports_parallel_function_calling": true
     },
     "deepinfra/01-ai/Yi-34B-200K": {
         "max_tokens": 4096,