feat(utils.py): support global vertex ai safety settings param

This commit is contained in:
Krrish Dholakia 2024-08-21 17:36:59 -07:00
parent 9078f075f9
commit 3c99ad19fa
3 changed files with 44 additions and 7 deletions

View file

@@ -339,6 +339,7 @@ api_version = None
organization = None
project = None
config_path = None
# Global default safety settings applied to Vertex AI requests when set
# (a list/dict of {"category": ..., "threshold": ...} entries per the diff's
# usage in get_optional_params; annotated Optional[dict] here — the test hunk
# below assigns a list, so the annotation is looser in practice).
vertex_ai_safety_settings: Optional[dict] = None
####### COMPLETION MODELS ###################
open_ai_chat_completion_models: List = []
open_ai_text_completion_models: List = []

View file

@@ -19,7 +19,11 @@ from litellm.types.completion import (
ChatCompletionSystemMessageParam,
ChatCompletionUserMessageParam,
)
from litellm.utils import get_optional_params, get_optional_params_embeddings
from litellm.utils import (
get_optional_params,
get_optional_params_embeddings,
get_optional_params_image_gen,
)
## get_optional_params_embeddings
### Models: OpenAI, Azure, Bedrock
@@ -430,7 +434,6 @@ def test_get_optional_params_image_gen():
print(response)
assert "aws_region_name" not in response
response = litellm.utils.get_optional_params_image_gen(
aws_region_name="us-east-1", custom_llm_provider="bedrock"
)
@@ -463,3 +466,36 @@ def test_get_optional_params_num_retries():
print(f"mock_client.call_args: {mock_client.call_args}")
assert mock_client.call_args.kwargs["max_retries"] == 10
@pytest.mark.parametrize(
    "provider",
    [
        "vertex_ai",
        "vertex_ai_beta",
    ],
)
def test_vertex_safety_settings(provider):
    """Globally-set `litellm.vertex_ai_safety_settings` is injected into
    the optional params for Vertex AI providers.

    The global is saved and restored in a try/finally so this test cannot
    leak state into other tests in the session (the original version left
    the global set after the test ran).
    """
    saved_settings = litellm.vertex_ai_safety_settings
    safety_settings = [
        {
            "category": "HARM_CATEGORY_HARASSMENT",
            "threshold": "BLOCK_NONE",
        },
        {
            "category": "HARM_CATEGORY_HATE_SPEECH",
            "threshold": "BLOCK_NONE",
        },
        {
            "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
            "threshold": "BLOCK_NONE",
        },
        {
            "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
            "threshold": "BLOCK_NONE",
        },
    ]
    try:
        litellm.vertex_ai_safety_settings = safety_settings
        optional_params = get_optional_params(
            model="gemini-1.5-pro", custom_llm_provider=provider
        )
        # The safety settings must be the ONLY param injected, and must
        # round-trip unchanged under the "safety_settings" key (see the
        # corresponding branch added in utils.get_optional_params).
        assert len(optional_params) == 1
        assert optional_params.get("safety_settings") == safety_settings
    finally:
        # Restore whatever was set before so later tests are unaffected.
        litellm.vertex_ai_safety_settings = saved_settings

View file

@@ -3145,7 +3145,6 @@ def get_optional_params(
or model in litellm.vertex_embedding_models
or model in litellm.vertex_vision_models
):
print_verbose(f"(start) INSIDE THE VERTEX AI OPTIONAL PARAM BLOCK")
## check if unsupported param passed in
supported_params = get_supported_openai_params(
model=model, custom_llm_provider=custom_llm_provider
@@ -3157,9 +3156,8 @@ def get_optional_params(
optional_params=optional_params,
)
print_verbose(
f"(end) INSIDE THE VERTEX AI OPTIONAL PARAM BLOCK - optional_params: {optional_params}"
)
if litellm.vertex_ai_safety_settings is not None:
optional_params["safety_settings"] = litellm.vertex_ai_safety_settings
elif custom_llm_provider == "gemini":
supported_params = get_supported_openai_params(
model=model, custom_llm_provider=custom_llm_provider
@@ -3170,7 +3168,7 @@ def get_optional_params(
optional_params=optional_params,
model=model,
)
elif custom_llm_provider == "vertex_ai_beta" or custom_llm_provider == "gemini":
elif custom_llm_provider == "vertex_ai_beta":
supported_params = get_supported_openai_params(
model=model, custom_llm_provider=custom_llm_provider
)
@@ -3185,6 +3183,8 @@ def get_optional_params(
else False
),
)
if litellm.vertex_ai_safety_settings is not None:
optional_params["safety_settings"] = litellm.vertex_ai_safety_settings
elif (
custom_llm_provider == "vertex_ai" and model in litellm.vertex_anthropic_models
):