fix - error str in OpenAI, Azure exception

Ishaan Jaff 2024-06-29 13:11:55 -07:00
parent e1f84b1bd9
commit a6bc878a2a
2 changed files with 69 additions and 18 deletions

litellm/litellm_core_utils/exception_mapping_utils.py (new file)

@@ -0,0 +1,40 @@
+import json
+from typing import Optional
+
+
+def get_error_message(error_obj) -> Optional[str]:
+    """
+    OpenAI returns a nested error payload; this extracts the inner message.
+
+    Example:
+    {
+        'request': "<Request('POST', 'https://api.openai.com/v1/chat/completions')>",
+        'message': "Error code: 400 - {'error': {'message': \"Invalid 'temperature': decimal above maximum value. Expected a value <= 2, but got 200 instead.\", 'type': 'invalid_request_error', 'param': 'temperature', 'code': 'decimal_above_max_value'}}",
+        'body': {
+            'message': "Invalid 'temperature': decimal above maximum value. Expected a value <= 2, but got 200 instead.",
+            'type': 'invalid_request_error',
+            'param': 'temperature',
+            'code': 'decimal_above_max_value'
+        },
+        'code': 'decimal_above_max_value',
+        'param': 'temperature',
+        'type': 'invalid_request_error',
+        'response': "<Response [400 Bad Request]>",
+        'status_code': 400,
+        'request_id': 'req_f287898caa6364cd42bc01355f74dd2a'
+    }
+    """
+    try:
+        if error_obj is None:
+            return None
+
+        # Try to read the message nested under the 'body' key
+        if hasattr(error_obj, "body"):
+            _error_obj_body = getattr(error_obj, "body")
+            if isinstance(_error_obj_body, dict):
+                return _error_obj_body.get("message")
+
+        # If all else fails, return None
+        return None
+    except Exception:
+        return None
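
For illustration, a minimal sketch of how the helper behaves. The stand-in error object below is hypothetical (the real input is an OpenAI SDK exception); only the `body` attribute matters to this function:

from types import SimpleNamespace

from litellm.litellm_core_utils.exception_mapping_utils import get_error_message

# Hypothetical stand-in mimicking an OpenAI SDK error object
fake_error = SimpleNamespace(
    body={
        "message": "Invalid 'temperature': decimal above maximum value. "
        "Expected a value <= 2, but got 200 instead.",
        "type": "invalid_request_error",
        "param": "temperature",
        "code": "decimal_above_max_value",
    }
)

print(get_error_message(fake_error))  # the nested body message
print(get_error_message(None))        # None
print(get_error_message(object()))    # None -- no `body` attribute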

litellm/utils.py

@@ -50,6 +50,7 @@ import litellm._service_logger # for storing API inputs, outputs, and metadata
 import litellm.litellm_core_utils
 from litellm.caching import DualCache
 from litellm.litellm_core_utils.core_helpers import map_finish_reason
+from litellm.litellm_core_utils.exception_mapping_utils import get_error_message
 from litellm.litellm_core_utils.llm_request_utils import _ensure_extra_body_is_safe
 from litellm.litellm_core_utils.redact_messages import (
     redact_message_input_output_from_logging,
@@ -5824,10 +5825,13 @@ def exception_type(
             or custom_llm_provider in litellm.openai_compatible_providers
         ):
             # custom_llm_provider is openai, make it OpenAI
-            if hasattr(original_exception, "message"):
-                message = original_exception.message
-            else:
-                message = str(original_exception)
+            message = get_error_message(error_obj=original_exception)
+            if message is None:
+                if hasattr(original_exception, "message"):
+                    message = original_exception.message
+                else:
+                    message = str(original_exception)
+
             if message is not None and isinstance(message, str):
                 message = message.replace("OPENAI", custom_llm_provider.upper())
                 message = message.replace("openai", custom_llm_provider)
@@ -7280,10 +7284,17 @@
                     request=original_exception.request,
                 )
         elif custom_llm_provider == "azure":
+            message = get_error_message(error_obj=original_exception)
+            if message is None:
+                if hasattr(original_exception, "message"):
+                    message = original_exception.message
+                else:
+                    message = str(original_exception)
+
             if "Internal server error" in error_str:
                 exception_mapping_worked = True
                 raise litellm.InternalServerError(
-                    message=f"AzureException Internal server error - {original_exception.message}",
+                    message=f"AzureException Internal server error - {message}",
                     llm_provider="azure",
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7296,7 +7307,7 @@
             elif "This model's maximum context length is" in error_str:
                 exception_mapping_worked = True
                 raise ContextWindowExceededError(
-                    message=f"AzureException ContextWindowExceededError - {original_exception.message}",
+                    message=f"AzureException ContextWindowExceededError - {message}",
                     llm_provider="azure",
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7305,7 +7316,7 @@
             elif "DeploymentNotFound" in error_str:
                 exception_mapping_worked = True
                 raise NotFoundError(
-                    message=f"AzureException NotFoundError - {original_exception.message}",
+                    message=f"AzureException NotFoundError - {message}",
                     llm_provider="azure",
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7325,7 +7336,7 @@
             ):
                 exception_mapping_worked = True
                 raise ContentPolicyViolationError(
-                    message=f"litellm.ContentPolicyViolationError: AzureException - {original_exception.message}",
+                    message=f"litellm.ContentPolicyViolationError: AzureException - {message}",
                     llm_provider="azure",
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7334,7 +7345,7 @@
             elif "invalid_request_error" in error_str:
                 exception_mapping_worked = True
                 raise BadRequestError(
-                    message=f"AzureException BadRequestError - {original_exception.message}",
+                    message=f"AzureException BadRequestError - {message}",
                     llm_provider="azure",
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7346,7 +7357,7 @@
             ):
                 exception_mapping_worked = True
                 raise AuthenticationError(
-                    message=f"{exception_provider} AuthenticationError - {original_exception.message}",
+                    message=f"{exception_provider} AuthenticationError - {message}",
                     llm_provider=custom_llm_provider,
                     model=model,
                     litellm_debug_info=extra_information,
@@ -7357,7 +7368,7 @@
                 if original_exception.status_code == 400:
                     exception_mapping_worked = True
                     raise BadRequestError(
-                        message=f"AzureException - {original_exception.message}",
+                        message=f"AzureException - {message}",
                        llm_provider="azure",
                        model=model,
                        litellm_debug_info=extra_information,
@@ -7366,7 +7377,7 @@
                 elif original_exception.status_code == 401:
                     exception_mapping_worked = True
                     raise AuthenticationError(
-                        message=f"AzureException AuthenticationError - {original_exception.message}",
+                        message=f"AzureException AuthenticationError - {message}",
                         llm_provider="azure",
                         model=model,
                         litellm_debug_info=extra_information,
@@ -7375,7 +7386,7 @@
                 elif original_exception.status_code == 408:
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"AzureException Timeout - {original_exception.message}",
+                        message=f"AzureException Timeout - {message}",
                         model=model,
                         litellm_debug_info=extra_information,
                         llm_provider="azure",
@@ -7383,7 +7394,7 @@
                 elif original_exception.status_code == 422:
                     exception_mapping_worked = True
                     raise BadRequestError(
-                        message=f"AzureException BadRequestError - {original_exception.message}",
+                        message=f"AzureException BadRequestError - {message}",
                         model=model,
                         llm_provider="azure",
                         litellm_debug_info=extra_information,
@@ -7392,7 +7403,7 @@
                 elif original_exception.status_code == 429:
                     exception_mapping_worked = True
                     raise RateLimitError(
-                        message=f"AzureException RateLimitError - {original_exception.message}",
+                        message=f"AzureException RateLimitError - {message}",
                         model=model,
                         llm_provider="azure",
                         litellm_debug_info=extra_information,
@@ -7401,7 +7412,7 @@
                 elif original_exception.status_code == 503:
                     exception_mapping_worked = True
                     raise ServiceUnavailableError(
-                        message=f"AzureException ServiceUnavailableError - {original_exception.message}",
+                        message=f"AzureException ServiceUnavailableError - {message}",
                         model=model,
                         llm_provider="azure",
                         litellm_debug_info=extra_information,
@@ -7410,7 +7421,7 @@
                 elif original_exception.status_code == 504:  # gateway timeout error
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"AzureException Timeout - {original_exception.message}",
+                        message=f"AzureException Timeout - {message}",
                         model=model,
                         litellm_debug_info=extra_information,
                         llm_provider="azure",
@@ -7419,7 +7430,7 @@
                     exception_mapping_worked = True
                     raise APIError(
                         status_code=original_exception.status_code,
-                        message=f"AzureException APIError - {original_exception.message}",
+                        message=f"AzureException APIError - {message}",
                         llm_provider="azure",
                         litellm_debug_info=extra_information,
                         model=model,
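
Taken together, callers catching litellm's mapped exceptions now see the extracted body message instead of the full nested repr. A hedged usage sketch, assuming a configured Azure deployment (the deployment name is a placeholder; the invalid temperature mirrors the docstring example):

import litellm

try:
    litellm.completion(
        model="azure/my-deployment",  # placeholder Azure deployment
        messages=[{"role": "user", "content": "hi"}],
        temperature=200,  # deliberately invalid (max is 2), as in the docstring example
    )
except litellm.BadRequestError as e:
    # With this change, the message is the clean body text, e.g.
    # "AzureException BadRequestError - Invalid 'temperature': ..."
    print(e.message)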