diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index 7faa3b275..c3649ac4f 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -2046,7 +2046,16 @@ "input_cost_per_token": 0.00000015, "output_cost_per_token": 0.00000015, "litellm_provider": "anyscale", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true + }, + "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 16384, + "input_cost_per_token": 0.00000015, + "output_cost_per_token": 0.00000015, + "litellm_provider": "anyscale", + "mode": "chat", + "supports_function_calling": true }, "anyscale/HuggingFaceH4/zephyr-7b-beta": { "max_tokens": 16384, diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json index 7faa3b275..c3649ac4f 100644 --- a/model_prices_and_context_window.json +++ b/model_prices_and_context_window.json @@ -2046,7 +2046,16 @@ "input_cost_per_token": 0.00000015, "output_cost_per_token": 0.00000015, "litellm_provider": "anyscale", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true + }, + "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 16384, + "input_cost_per_token": 0.00000015, + "output_cost_per_token": 0.00000015, + "litellm_provider": "anyscale", + "mode": "chat", + "supports_function_calling": true }, "anyscale/HuggingFaceH4/zephyr-7b-beta": { "max_tokens": 16384,