Handle optional params

This commit is contained in:
Rena Lu 2024-02-14 23:27:16 +00:00
parent 02c58a9760
commit 9c45bfdb70

View file

@ -4192,7 +4192,14 @@ def get_optional_params(
optional_params["stop_sequences"] = stop
if max_tokens is not None:
optional_params["max_output_tokens"] = max_tokens
elif custom_llm_provider == "vertex_ai":
elif custom_llm_provider == "vertex_ai" and model in (
litellm.vertex_chat_models
or model in litellm.vertex_code_chat_models
or model in litellm.vertex_text_models
or model in litellm.vertex_code_text_models
or model in litellm.vertex_language_models
or model in litellm.vertex_embedding_models
):
## check if unsupported param passed in
supported_params = [
"temperature",