diff --git a/litellm/tests/test_utils.py b/litellm/tests/test_utils.py
index 9f2e99a805..491283eeda 100644
--- a/litellm/tests/test_utils.py
+++ b/litellm/tests/test_utils.py
@@ -390,4 +390,8 @@ def test_get_max_token_unit_test():
 
 
 def test_get_supported_openai_params() -> None:
+    # Mapped provider
     assert isinstance(get_supported_openai_params("gpt-4"), list)
+
+    # Unmapped provider
+    assert get_supported_openai_params("nonexistent") is None
diff --git a/litellm/utils.py b/litellm/utils.py
index 3ad7ea312e..3595ca0bd1 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -6242,7 +6242,10 @@ def get_supported_openai_params(
     - None if unmapped
     """
     if not custom_llm_provider:
-        custom_llm_provider = litellm.get_llm_provider(model=model)[1]
+        try:
+            custom_llm_provider = litellm.get_llm_provider(model=model)[1]
+        except BadRequestError:
+            return None
     if custom_llm_provider == "bedrock":
         return litellm.AmazonConverseConfig().get_supported_openai_params(model=model)
     elif custom_llm_provider == "ollama":