Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
handle optional params
parent 02c58a9760
commit 9c45bfdb70

1 changed file with 8 additions and 1 deletion
@@ -4192,7 +4192,14 @@ def get_optional_params(
             optional_params["stop_sequences"] = stop
         if max_tokens is not None:
             optional_params["max_output_tokens"] = max_tokens
-    elif custom_llm_provider == "vertex_ai":
+    elif custom_llm_provider == "vertex_ai" and model in (
+        litellm.vertex_chat_models
+        or model in litellm.vertex_code_chat_models
+        or model in litellm.vertex_text_models
+        or model in litellm.vertex_code_text_models
+        or model in litellm.vertex_language_models
+        or model in litellm.vertex_embedding_models
+    ):
         ## check if unsupported param passed in
         supported_params = [
             "temperature",
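The added guard means the vertex_ai branch of get_optional_params should only run when the model appears in one of litellm's Vertex AI model lists (vertex_chat_models, vertex_code_chat_models, vertex_text_models, vertex_code_text_models, vertex_language_models, vertex_embedding_models). Below is a minimal, self-contained sketch of that membership check under that reading of the diff: the lists are stand-ins for the real litellm module attributes, the model names in them are placeholders, and is_vertex_ai_model is a hypothetical helper, not part of litellm.

    # Sketch of the gating this commit adds, not litellm's actual code.
    # The vertex_* lists are stand-ins for the litellm module attributes
    # referenced in the diff; the model names are placeholder examples.
    vertex_chat_models = ["chat-bison", "chat-bison@001"]
    vertex_code_chat_models = ["codechat-bison"]
    vertex_text_models = ["text-bison", "text-bison@001"]
    vertex_code_text_models = ["code-bison"]
    vertex_language_models = ["gemini-pro"]
    vertex_embedding_models = ["textembedding-gecko"]

    def is_vertex_ai_model(custom_llm_provider: str, model: str) -> bool:
        # True only when the provider is vertex_ai and the model is found
        # in at least one of the Vertex AI model lists.
        return custom_llm_provider == "vertex_ai" and (
            model in vertex_chat_models
            or model in vertex_code_chat_models
            or model in vertex_text_models
            or model in vertex_code_text_models
            or model in vertex_language_models
            or model in vertex_embedding_models
        )

    print(is_vertex_ai_model("vertex_ai", "chat-bison"))  # True
    print(is_vertex_ai_model("vertex_ai", "gpt-4"))       # False

Note that the sketch writes each membership test explicitly and joins them with or, whereas the diff wraps the or-chain inside a single model in (...) expression; the sketch reflects the per-list membership check the added lines appear to intend.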