fix(utils.py): parse out aws specific params from openai call

Fixes https://github.com/BerriAI/litellm/issues/5009
Krrish Dholakia 2024-08-03 12:04:44 -07:00
parent 5add6687cc
commit ed8b20fa18
2 changed files with 30 additions and 0 deletions
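
For context on the linked issue: provider-specific kwargs such as
aws_region_name were being forwarded into the request body of OpenAI
image-generation calls, where the OpenAI API rejects them as unknown
parameters. A minimal repro sketch, assuming litellm's public
image_generation() entrypoint (the model and region values are illustrative):

    import litellm

    # Before this fix, the bedrock-style boto3 kwarg below leaked into the
    # payload sent to OpenAI instead of being filtered out for non-AWS
    # providers, causing the request to fail.
    litellm.image_generation(
        prompt="A cute baby sea otter",
        model="dall-e-3",
        aws_region_name="us-east-1",  # bedrock/sagemaker-only kwarg
    )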


@@ -420,3 +420,21 @@ def test_dynamic_drop_additional_params_e2e():
     print(mock_response.call_args.kwargs["data"])
     assert "response_format" not in mock_response.call_args.kwargs["data"]
     assert "additional_drop_params" not in mock_response.call_args.kwargs["data"]
+
+
+def test_get_optional_params_image_gen():
+    response = litellm.utils.get_optional_params_image_gen(
+        aws_region_name="us-east-1", custom_llm_provider="openai"
+    )
+
+    print(response)
+
+    assert "aws_region_name" not in response
+
+    response = litellm.utils.get_optional_params_image_gen(
+        aws_region_name="us-east-1", custom_llm_provider="bedrock"
+    )
+
+    print(response)
+
+    assert "aws_region_name" in response

utils.py

@@ -2391,6 +2391,18 @@ def get_optional_params_image_gen(
     additional_drop_params = passed_params.pop("additional_drop_params", None)
     special_params = passed_params.pop("kwargs")
     for k, v in special_params.items():
+        if k.startswith("aws_") and (
+            custom_llm_provider != "bedrock" and custom_llm_provider != "sagemaker"
+        ):  # allow dynamically setting boto3 init logic
+            continue
+        elif k == "hf_model_name" and custom_llm_provider != "sagemaker":
+            continue
+        elif (
+            k.startswith("vertex_")
+            and custom_llm_provider != "vertex_ai"
+            and custom_llm_provider != "vertex_ai_beta"
+        ):  # allow dynamically setting vertex ai init logic
+            continue
         passed_params[k] = v

     default_params = {
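
Extracted for readability, the filtering rule this hunk adds can be read as a
standalone function; the sketch below mirrors the behavior only and is not
the full get_optional_params_image_gen:

    def filter_special_params(special_params: dict, custom_llm_provider: str) -> dict:
        # Drop provider-specific kwargs unless the call is actually routed
        # to that provider.
        kept = {}
        for k, v in special_params.items():
            if k.startswith("aws_") and custom_llm_provider not in ("bedrock", "sagemaker"):
                continue  # boto3 init params only apply to AWS providers
            if k == "hf_model_name" and custom_llm_provider != "sagemaker":
                continue
            if k.startswith("vertex_") and custom_llm_provider not in (
                "vertex_ai",
                "vertex_ai_beta",
            ):
                continue
            kept[k] = v
        return kept

    # Matches the new test's expectations:
    assert "aws_region_name" not in filter_special_params(
        {"aws_region_name": "us-east-1"}, "openai"
    )
    assert "aws_region_name" in filter_special_params(
        {"aws_region_name": "us-east-1"}, "bedrock"
    )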