Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
Merge pull request #3828 from BerriAI/litellm_outage_alerting
fix(slack_alerting.py): support region based outage alerting
Commit d25ed9c4d3
9 changed files with 414 additions and 78 deletions
@@ -6298,7 +6298,9 @@ def get_model_region(
     return None


-def get_api_base(model: str, optional_params: dict) -> Optional[str]:
+def get_api_base(
+    model: str, optional_params: Union[dict, LiteLLM_Params]
+) -> Optional[str]:
     """
     Returns the api base used for calling the model.

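The change above widens optional_params from dict to Union[dict, LiteLLM_Params], so callers that already hold a typed params object (e.g. the router) can pass it through directly. A hedged usage sketch; the import paths, deployment name and api_base are illustrative assumptions, not taken from this diff:

# Illustrative only: import locations and the values used are assumptions.
from litellm.utils import get_api_base            # assumed module path
from litellm.types.router import LiteLLM_Params   # assumed module path

# Plain dict (pre-existing behaviour):
api_base = get_api_base(
    model="azure/my-deployment",
    optional_params={"api_base": "https://example-resource.openai.azure.com"},
)

# Already-built LiteLLM_Params object (newly accepted):
params = LiteLLM_Params(
    model="azure/my-deployment",
    api_base="https://example-resource.openai.azure.com",
)
api_base = get_api_base(model="azure/my-deployment", optional_params=params)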
@@ -6318,7 +6320,9 @@ def get_api_base(model: str, optional_params: dict) -> Optional[str]:
     """

     try:
-        if "model" in optional_params:
+        if isinstance(optional_params, LiteLLM_Params):
+            _optional_params = optional_params
+        elif "model" in optional_params:
             _optional_params = LiteLLM_Params(**optional_params)
         else:  # prevent needing to copy and pop the dict
             _optional_params = LiteLLM_Params(
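The dispatch added here normalizes both representations to one LiteLLM_Params object without copying or mutating the caller's dict. A minimal self-contained re-creation of the pattern, using a stand-in pydantic model rather than litellm's actual LiteLLM_Params:

from typing import Optional, Union
from pydantic import BaseModel


class ParamsModel(BaseModel):
    """Stand-in for LiteLLM_Params; illustrative only."""

    model: str
    api_base: Optional[str] = None


def normalize_params(model: str, optional_params: Union[dict, ParamsModel]) -> ParamsModel:
    if isinstance(optional_params, ParamsModel):
        # Already a typed object (e.g. handed over by the router) - use it as is.
        return optional_params
    elif "model" in optional_params:
        # The dict already carries the model name.
        return ParamsModel(**optional_params)
    else:
        # Inject the model name without copying/popping the caller's dict.
        return ParamsModel(model=model, **optional_params)


print(normalize_params("gpt-4", {"api_base": "https://example.test"}).api_base)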
@@ -6711,6 +6715,8 @@ def get_llm_provider(
     Returns the provider for a given model name - e.g. 'azure/chatgpt-v-2' -> 'azure'

+    For router -> Can also give the whole litellm param dict -> this function will extract the relevant details
+
     Raises Error - if unable to map model to a provider
     """
     try:
         ## IF LITELLM PARAMS GIVEN ##
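The docstring addition documents that the router can hand over its whole litellm param dict. A hedged usage sketch of the mapping itself; it assumes get_llm_provider is importable from the litellm package and returns a (model, provider, dynamic_api_key, api_base) tuple, which is how the function behaves around this release:

import litellm

# Per the docstring above: 'azure/chatgpt-v-2' -> 'azure'
_model, provider, _dynamic_api_key, _api_base = litellm.get_llm_provider(
    model="azure/chatgpt-v-2"
)
print(provider)  # azure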
@@ -8644,7 +8650,16 @@ def exception_type(
                 )
             elif hasattr(original_exception, "status_code"):
                 exception_mapping_worked = True
-                if original_exception.status_code == 401:
+                if original_exception.status_code == 400:
+                    exception_mapping_worked = True
+                    raise BadRequestError(
+                        message=f"{exception_provider} - {message}",
+                        llm_provider=custom_llm_provider,
+                        model=model,
+                        response=original_exception.response,
+                        litellm_debug_info=extra_information,
+                    )
+                elif original_exception.status_code == 401:
                     exception_mapping_worked = True
                     raise AuthenticationError(
                         message=f"{exception_provider} - {message}",
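This hunk adds an explicit 400 -> BadRequestError branch ahead of the existing 401 -> AuthenticationError one, so a provider's 4xx responses map to typed litellm exceptions instead of falling through to a generic error. A trimmed, self-contained sketch of that dispatch shape, with plain Exception subclasses standing in for litellm's exception classes:

class BadRequestError(Exception):
    """Stand-in for litellm's BadRequestError; illustrative only."""


class AuthenticationError(Exception):
    """Stand-in for litellm's AuthenticationError; illustrative only."""


def map_provider_error(status_code: int, provider: str, message: str) -> None:
    # Mirrors the if/elif ladder in the hunk above, in reduced form.
    if status_code == 400:
        raise BadRequestError(f"{provider} - {message}")
    elif status_code == 401:
        raise AuthenticationError(f"{provider} - {message}")
    # ...further status codes continue the same ladder in the real function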
@@ -9157,6 +9172,7 @@ def exception_type(
                    ),
                ),
            )
+
        if hasattr(original_exception, "status_code"):
            if original_exception.status_code == 400:
                exception_mapping_worked = True
@@ -9837,7 +9853,16 @@ def exception_type(
                 )
             elif hasattr(original_exception, "status_code"):
                 exception_mapping_worked = True
-                if original_exception.status_code == 401:
+                if original_exception.status_code == 400:
+                    exception_mapping_worked = True
+                    raise BadRequestError(
+                        message=f"AzureException - {original_exception.message}",
+                        llm_provider="azure",
+                        model=model,
+                        litellm_debug_info=extra_information,
+                        response=original_exception.response,
+                    )
+                elif original_exception.status_code == 401:
                     exception_mapping_worked = True
                     raise AuthenticationError(
                         message=f"AzureException - {original_exception.message}",
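From a caller's perspective, the practical effect is that an Azure 400 now surfaces as litellm's BadRequestError rather than an unmapped provider exception. A hedged usage sketch; the deployment name, api_base, key and api_version are placeholders, and it assumes the exception classes are exposed at the litellm package level:

import litellm

try:
    litellm.completion(
        model="azure/my-deployment",  # placeholder deployment name
        messages=[{"role": "user", "content": "hi"}],
        api_base="https://example-resource.openai.azure.com",  # placeholder
        api_key="placeholder-key",
        api_version="2024-02-01",  # placeholder version string
    )
except litellm.BadRequestError as e:
    print("Azure 400 mapped to BadRequestError:", e)
except litellm.AuthenticationError as e:
    print("Azure 401 mapped to AuthenticationError:", e)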
@@ -9854,7 +9879,7 @@ def exception_type(
                         litellm_debug_info=extra_information,
                         llm_provider="azure",
                     )
-                if original_exception.status_code == 422:
+                elif original_exception.status_code == 422:
                     exception_mapping_worked = True
                     raise BadRequestError(
                         message=f"AzureException - {original_exception.message}",