mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
Added handling of unmapped provider, with test

parent 8be37bee04
commit ef6e920161

2 changed files with 8 additions and 1 deletion
@@ -390,4 +390,8 @@ def test_get_max_token_unit_test():
 
 
 def test_get_supported_openai_params() -> None:
+    # Mapped provider
     assert isinstance(get_supported_openai_params("gpt-4"), list)
+
+    # Unmapped provider
+    assert get_supported_openai_params("nonexistent") is None
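For context, a minimal sketch of the behaviour this test covers, run directly against the helper. It assumes litellm is installed and that get_supported_openai_params is importable from the litellm package, as the test module does via its own imports; it is not part of the diff itself.

# Sketch of the behaviour covered by the new test (not part of the commit).
from litellm import get_supported_openai_params

# Mapped provider: returns the list of OpenAI params the model supports.
params = get_supported_openai_params("gpt-4")
print(isinstance(params, list))  # True

# Unmapped provider: with this change the helper returns None instead of raising.
print(get_supported_openai_params("nonexistent"))  # None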
@@ -6242,7 +6242,10 @@ def get_supported_openai_params(
     - None if unmapped
     """
     if not custom_llm_provider:
-        custom_llm_provider = litellm.get_llm_provider(model=model)[1]
+        try:
+            custom_llm_provider = litellm.get_llm_provider(model=model)[1]
+        except BadRequestError:
+            return None
     if custom_llm_provider == "bedrock":
         return litellm.AmazonConverseConfig().get_supported_openai_params(model=model)
     elif custom_llm_provider == "ollama":
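With this change, callers can branch on a None return instead of catching BadRequestError themselves when the provider cannot be mapped. A small caller-side sketch follows; supports_tools is a hypothetical helper written for illustration, not code from the repository, and the "tools" check assumes that param appears in the mapped provider's list.

# Hypothetical caller-side sketch (not part of the commit).
import litellm

def supports_tools(model: str) -> bool:
    # Returns False when the provider is unmapped (None) or when the mapped
    # provider's supported-params list does not include "tools".
    params = litellm.get_supported_openai_params(model=model)
    if params is None:
        return False
    return "tools" in params

print(supports_tools("gpt-4"))        # likely True for a mapped OpenAI model
print(supports_tools("nonexistent"))  # False: provider is unmapped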