Merge branch 'main' into litellm_fix_azure_function_calling_streaming

This commit is contained in:
Krish Dholakia 2024-02-22 22:36:38 -08:00 committed by GitHub
commit dd4439b6a8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 869 additions and 173 deletions

View file

@@ -4277,8 +4277,8 @@ def get_optional_params(
optional_params["stop_sequences"] = stop
if max_tokens is not None:
optional_params["max_output_tokens"] = max_tokens
elif custom_llm_provider == "vertex_ai" and model in (
litellm.vertex_chat_models
elif custom_llm_provider == "vertex_ai" and (
model in litellm.vertex_chat_models
or model in litellm.vertex_code_chat_models
or model in litellm.vertex_text_models
or model in litellm.vertex_code_text_models
@@ -6827,6 +6827,14 @@ def exception_type(
llm_provider="palm",
response=original_exception.response,
)
if "504 Deadline expired before operation could complete." in error_str:
exception_mapping_worked = True
raise Timeout(
message=f"PalmException - {original_exception.message}",
model=model,
llm_provider="palm",
request=original_exception.request,
)
if "400 Request payload size exceeds" in error_str:
exception_mapping_worked = True
raise ContextWindowExceededError(