feat: include model name in cooldown alerts

This commit is contained in:
Ishaan Jaff 2024-05-16 12:52:15 -07:00
parent 3351c5f11d
commit d16a6c03a2
2 changed files with 3 additions and 3 deletions

View file

@ -1076,7 +1076,7 @@ async def user_api_key_auth(
if not _is_user_proxy_admin(user_id_information): # if non-admin if not _is_user_proxy_admin(user_id_information): # if non-admin
if route in LiteLLMRoutes.openai_routes.value: if route in LiteLLMRoutes.openai_routes.value:
pass pass
elif request['route'].name in LiteLLMRoutes.openai_route_names.value: elif request["route"].name in LiteLLMRoutes.openai_route_names.value:
pass pass
elif ( elif (
route in LiteLLMRoutes.info_routes.value route in LiteLLMRoutes.info_routes.value
@ -4976,7 +4976,7 @@ async def update_key_fn(request: Request, data: UpdateKeyRequest):
if "duration" in non_default_values: if "duration" in non_default_values:
duration = non_default_values.pop("duration") duration = non_default_values.pop("duration")
duration_s = _duration_in_seconds(duration=duration) duration_s = _duration_in_seconds(duration=duration)
expires = datetime.datetime.now(timezone.utc) + timedelta(seconds=duration_s) expires = datetime.now(timezone.utc) + timedelta(seconds=duration_s)
non_default_values["expires"] = expires non_default_values["expires"] = expires
response = await prisma_client.update_data( response = await prisma_client.update_data(

View file

@ -3775,7 +3775,7 @@ class Router:
) )
asyncio.create_task( asyncio.create_task(
proxy_logging_obj.slack_alerting_instance.send_alert( proxy_logging_obj.slack_alerting_instance.send_alert(
message=f"Router: Cooling down deployment: {_api_base}, for {self.cooldown_time} seconds. Got exception: {str(exception_status)}. Change 'cooldown_time' + 'allowed_fails' under 'Router Settings' on proxy UI, or via config - https://docs.litellm.ai/docs/proxy/reliability#fallbacks--retries--timeouts--cooldowns", message=f"Router: Cooling down Deployment:\nModel Name: {_model_name}\nAPI Base: {_api_base}\n{self.cooldown_time} seconds. Got exception: {str(exception_status)}. Change 'cooldown_time' + 'allowed_fails' under 'Router Settings' on proxy UI, or via config - https://docs.litellm.ai/docs/proxy/reliability#fallbacks--retries--timeouts--cooldowns",
alert_type="cooldown_deployment", alert_type="cooldown_deployment",
level="Low", level="Low",
) )