Merge branch 'main' into litellm_azure_batch_apis

Krish Dholakia 2024-08-22 19:07:54 -07:00 committed by GitHub
commit 76b3db334b
38 changed files with 1078 additions and 159 deletions


@@ -1588,7 +1588,7 @@ class ProxyConfig:
verbose_proxy_logger.debug( # noqa
f"{blue_color_code}Set Cache on LiteLLM Proxy: {vars(litellm.cache.cache)}{reset_color_code}"
)
-elif key == "cache" and value == False:
+elif key == "cache" and value is False:
pass
elif key == "guardrails":
if premium_user is not True:
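
Side note on the first hunk: "value == False" uses equality, which also matches other falsy-equal values such as 0 and is commonly flagged by linters (e.g. flake8's E712), while "value is False" only matches the boolean singleton. A minimal standalone sketch of the difference (illustrative only, not part of the diff):

# Equality vs identity against False (illustration, not proxy code)
for value in (False, 0, 0.0, None, ""):
    print(repr(value), value == False, value is False)
# Output:
# False True True
# 0 True False      <- == False also matches 0; `is False` does not
# 0.0 True False
# None False False
# '' False False
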
@@ -2672,6 +2672,13 @@ def giveup(e):
and isinstance(e.message, str)
and "Max parallel request limit reached" in e.message
)
+if (
+    general_settings.get("disable_retry_on_max_parallel_request_limit_error")
+    is True
+):
+    return True  # giveup if queuing max parallel request limits is disabled
if result:
verbose_proxy_logger.info(json.dumps({"event": "giveup", "exception": str(e)}))
return result
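
The second hunk adds an escape hatch to giveup(): when general_settings has disable_retry_on_max_parallel_request_limit_error set to True, the predicate returns True so the caller gives up immediately instead of retrying "Max parallel request limit reached" errors. Below is a minimal, self-contained sketch of how such a predicate can plug into a backoff-style retry decorator; the general_settings dict, ProxyException stub, and call_llm function are illustrative stand-ins, not the proxy's actual wiring.

# Sketch only: assumes the `backoff` library and stand-in names
# (general_settings, ProxyException, call_llm); not the proxy's real retry code.
import backoff

general_settings = {"disable_retry_on_max_parallel_request_limit_error": True}

class ProxyException(Exception):
    def __init__(self, message: str):
        super().__init__(message)
        self.message = message

def giveup(e: Exception) -> bool:
    # Retry only "Max parallel request limit reached" errors ...
    result = not (
        isinstance(e, ProxyException)
        and isinstance(e.message, str)
        and "Max parallel request limit reached" in e.message
    )
    # ... unless the operator disabled retrying on that error entirely.
    if (
        general_settings.get("disable_retry_on_max_parallel_request_limit_error")
        is True
    ):
        return True
    return result

@backoff.on_exception(backoff.expo, Exception, max_tries=3, giveup=giveup)
def call_llm():
    raise ProxyException("Max parallel request limit reached")

# With the flag above set to True, backoff re-raises after the first attempt
# instead of retrying up to max_tries times.

Exposing this as a general_settings flag lets an operator opt out of the retry-on-rate-limit behaviour through configuration rather than a code change.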