diff --git a/litellm/utils.py b/litellm/utils.py
index 98461d58b..3ad7ea312 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -6226,7 +6226,7 @@ def get_first_chars_messages(kwargs: dict) -> str:
 
 def get_supported_openai_params(
     model: str,
-    custom_llm_provider: str,
+    custom_llm_provider: Optional[str] = None,
     request_type: Literal["chat_completion", "embeddings"] = "chat_completion",
 ) -> Optional[list]:
     """
@@ -6241,6 +6241,8 @@ def get_supported_openai_params(
     - List if custom_llm_provider is mapped
     - None if unmapped
     """
+    if not custom_llm_provider:
+        custom_llm_provider = litellm.get_llm_provider(model=model)[1]
    if custom_llm_provider == "bedrock":
        return litellm.AmazonConverseConfig().get_supported_openai_params(model=model)
    elif custom_llm_provider == "ollama":
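
For context, a minimal usage sketch of the new optional-provider behavior (the model strings below are illustrative examples, not part of this patch):

    from litellm.utils import get_supported_openai_params

    # Passing the provider explicitly still works as before.
    params = get_supported_openai_params(
        model="anthropic.claude-3-sonnet-20240229-v1:0",
        custom_llm_provider="bedrock",
    )

    # With this change the provider argument can be omitted; the function
    # falls back to litellm.get_llm_provider(model=model)[1] to infer the
    # provider from the model string (e.g. the "groq/" prefix here).
    params = get_supported_openai_params(model="groq/llama3-8b-8192")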