feat - include litellm exception type when raising exception

Ishaan Jaff 2024-05-24 10:45:37 -07:00
parent 8616f12d29
commit 2b85d0faf9

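This commit prefixes every exception raised by litellm's exception_type() mapping with the name of the exception class, so the mapped type is visible even when a caller only logs str(e).

A minimal sketch of what a caller observes after this change; the model name and prompt are placeholders, and the exact provider prefix (e.g. "OpenAIException") depends on exception_provider at runtime:

    import litellm

    try:
        litellm.completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "some oversized prompt ..."}],
        )
    except litellm.exceptions.ContextWindowExceededError as e:
        # the message now leads with the mapped type, e.g.
        # "ContextWindowExceededError: OpenAIException - This model's maximum context length is ..."
        print(str(e))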

@@ -8555,7 +8555,7 @@ def exception_type(
         if "This model's maximum context length is" in error_str:
             exception_mapping_worked = True
             raise ContextWindowExceededError(
-                message=f"{exception_provider} - {message}",
+                message=f"ContextWindowExceededError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 response=original_exception.response,
@@ -8579,7 +8579,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise ContentPolicyViolationError(
-                message=f"{exception_provider} - {message}",
+                message=f"ContentPolicyViolationError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 response=original_exception.response,
@@ -8591,7 +8591,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"{exception_provider} - {message}",
+                message=f"BadRequestError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 response=original_exception.response,
@@ -8599,7 +8599,7 @@ def exception_type(
             )
         elif "Request too large" in error_str:
             raise RateLimitError(
-                message=f"{exception_provider} - {message}",
+                message=f"RateLimitError: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
@@ -8611,7 +8611,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise AuthenticationError(
-                message=f"{exception_provider} - {message}",
+                message=f"AuthenticationError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 response=original_exception.response,
@@ -8635,7 +8635,7 @@ def exception_type(
         if original_exception.status_code == 401:
             exception_mapping_worked = True
             raise AuthenticationError(
-                message=f"{exception_provider} - {message}",
+                message=f"AuthenticationError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 response=original_exception.response,
@@ -8644,7 +8644,7 @@ def exception_type(
         elif original_exception.status_code == 404:
             exception_mapping_worked = True
             raise NotFoundError(
-                message=f"{exception_provider} - {message}",
+                message=f"NotFoundError: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
@@ -8653,7 +8653,7 @@ def exception_type(
         elif original_exception.status_code == 408:
             exception_mapping_worked = True
             raise Timeout(
-                message=f"{exception_provider} - {message}",
+                message=f"Timeout Error: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 litellm_debug_info=extra_information,
@@ -8661,7 +8661,7 @@ def exception_type(
         elif original_exception.status_code == 422:
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"{exception_provider} - {message}",
+                message=f"BadRequestError: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
@@ -8670,7 +8670,7 @@ def exception_type(
         elif original_exception.status_code == 429:
             exception_mapping_worked = True
             raise RateLimitError(
-                message=f"{exception_provider} - {message}",
+                message=f"RateLimitError: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
@@ -8679,7 +8679,7 @@ def exception_type(
         elif original_exception.status_code == 503:
             exception_mapping_worked = True
             raise ServiceUnavailableError(
-                message=f"{exception_provider} - {message}",
+                message=f"ServiceUnavailableError: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
@@ -8688,7 +8688,7 @@ def exception_type(
         elif original_exception.status_code == 504: # gateway timeout error
             exception_mapping_worked = True
             raise Timeout(
-                message=f"{exception_provider} - {message}",
+                message=f"Timeout Error: {exception_provider} - {message}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 litellm_debug_info=extra_information,
@@ -8697,7 +8697,7 @@ def exception_type(
             exception_mapping_worked = True
             raise APIError(
                 status_code=original_exception.status_code,
-                message=f"{exception_provider} - {message}",
+                message=f"APIError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 request=original_exception.request,
@@ -8706,7 +8706,7 @@ def exception_type(
         else:
             # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors
             raise APIConnectionError(
-                message=f"{exception_provider} - {message}",
+                message=f"APIConnectionError: {exception_provider} - {message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9082,7 +9082,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException BadRequestError - {error_str}",
                 model=model,
                 llm_provider="vertex_ai",
                 response=original_exception.response,
@@ -9094,7 +9094,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise APIError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException APIError - {error_str}",
                 status_code=500,
                 model=model,
                 llm_provider="vertex_ai",
@@ -9104,7 +9104,7 @@ def exception_type(
         elif "403" in error_str:
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException BadRequestError - {error_str}",
                 model=model,
                 llm_provider="vertex_ai",
                 response=original_exception.response,
@@ -9113,7 +9113,7 @@ def exception_type(
         elif "The response was blocked." in error_str:
             exception_mapping_worked = True
             raise UnprocessableEntityError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException UnprocessableEntityError - {error_str}",
                 model=model,
                 llm_provider="vertex_ai",
                 litellm_debug_info=extra_information,
@@ -9133,7 +9133,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise RateLimitError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException RateLimitError - {error_str}",
                 model=model,
                 llm_provider="vertex_ai",
                 litellm_debug_info=extra_information,
@@ -9149,7 +9149,7 @@ def exception_type(
         if original_exception.status_code == 400:
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException BadRequestError - {error_str}",
                 model=model,
                 llm_provider="vertex_ai",
                 litellm_debug_info=extra_information,
@@ -9158,7 +9158,7 @@ def exception_type(
         if original_exception.status_code == 500:
             exception_mapping_worked = True
             raise APIError(
-                message=f"VertexAIException - {error_str}",
+                message=f"VertexAIException APIError - {error_str}",
                 status_code=500,
                 model=model,
                 llm_provider="vertex_ai",
@@ -9763,7 +9763,7 @@ def exception_type(
             exception_mapping_worked = True
             raise APIError(
                 status_code=500,
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException Internal server error - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9772,7 +9772,7 @@ def exception_type(
         elif "This model's maximum context length is" in error_str:
             exception_mapping_worked = True
             raise ContextWindowExceededError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException ContextWindowExceededError - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9781,7 +9781,7 @@ def exception_type(
         elif "DeploymentNotFound" in error_str:
             exception_mapping_worked = True
             raise NotFoundError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException NotFoundError - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9796,7 +9796,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise ContentPolicyViolationError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException ContentPolicyViolationError - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9805,7 +9805,7 @@ def exception_type(
         elif "invalid_request_error" in error_str:
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException BadRequestError - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9817,7 +9817,7 @@ def exception_type(
         ):
             exception_mapping_worked = True
             raise AuthenticationError(
-                message=f"{exception_provider} - {original_exception.message}",
+                message=f"{exception_provider} AuthenticationError - {original_exception.message}",
                 llm_provider=custom_llm_provider,
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9828,7 +9828,7 @@ def exception_type(
         if original_exception.status_code == 401:
             exception_mapping_worked = True
             raise AuthenticationError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException AuthenticationError - {original_exception.message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9837,7 +9837,7 @@ def exception_type(
         elif original_exception.status_code == 408:
             exception_mapping_worked = True
             raise Timeout(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException Timeout - {original_exception.message}",
                 model=model,
                 litellm_debug_info=extra_information,
                 llm_provider="azure",
@@ -9845,7 +9845,7 @@ def exception_type(
         if original_exception.status_code == 422:
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException BadRequestError - {original_exception.message}",
                 model=model,
                 llm_provider="azure",
                 litellm_debug_info=extra_information,
@@ -9854,7 +9854,7 @@ def exception_type(
         elif original_exception.status_code == 429:
             exception_mapping_worked = True
             raise RateLimitError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException RateLimitError - {original_exception.message}",
                 model=model,
                 llm_provider="azure",
                 litellm_debug_info=extra_information,
@@ -9863,7 +9863,7 @@ def exception_type(
         elif original_exception.status_code == 503:
             exception_mapping_worked = True
             raise ServiceUnavailableError(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException ServiceUnavailableError - {original_exception.message}",
                 model=model,
                 llm_provider="azure",
                 litellm_debug_info=extra_information,
@@ -9872,7 +9872,7 @@ def exception_type(
         elif original_exception.status_code == 504: # gateway timeout error
             exception_mapping_worked = True
             raise Timeout(
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException Timeout - {original_exception.message}",
                 model=model,
                 litellm_debug_info=extra_information,
                 llm_provider="azure",
@@ -9881,7 +9881,7 @@ def exception_type(
             exception_mapping_worked = True
             raise APIError(
                 status_code=original_exception.status_code,
-                message=f"AzureException - {original_exception.message}",
+                message=f"AzureException APIError - {original_exception.message}",
                 llm_provider="azure",
                 litellm_debug_info=extra_information,
                 model=model,
@@ -9892,7 +9892,7 @@ def exception_type(
         else:
             # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors
             raise APIConnectionError(
-                message=f"{exception_provider} - {message}",
+                message=f"{exception_provider} APIConnectionError - {message}",
                 llm_provider="azure",
                 model=model,
                 litellm_debug_info=extra_information,
@@ -9904,7 +9904,7 @@ def exception_type(
         ): # deal with edge-case invalid request error bug in openai-python sdk
             exception_mapping_worked = True
             raise BadRequestError(
-                message=f"{exception_provider}: This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
+                message=f"{exception_provider} BadRequestError : This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
                 model=model,
                 llm_provider=custom_llm_provider,
                 response=original_exception.response,
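Every exception_type() hunk above follows the same pattern: match the provider's error string or HTTP status code, then raise the corresponding litellm exception with its class name folded into the message. A condensed, hypothetical sketch of that pattern (toy exception classes; litellm's real constructors also take llm_provider, model, response, and litellm_debug_info):

    class AuthenticationError(Exception): ...
    class RateLimitError(Exception): ...
    class APIError(Exception): ...

    _STATUS_MAP = {401: AuthenticationError, 429: RateLimitError}

    def map_provider_error(status_code, provider, message):
        exc_cls = _STATUS_MAP.get(status_code, APIError)
        # the commit's change: embed the exception type name in the message
        return exc_cls(f"{exc_cls.__name__}: {provider} - {message}")

    err = map_provider_error(429, "OpenAIException", "Request too large")
    print(err)  # RateLimitError: OpenAIException - Request too large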
@@ -11416,11 +11416,8 @@ class CustomStreamWrapper:
             self.response_id = original_chunk.id
         if len(original_chunk.choices) > 0:
             delta = original_chunk.choices[0].delta
-            if (
-                delta is not None and (
-                    delta.function_call is not None
-                    or delta.tool_calls is not None
-                )
+            if delta is not None and (
+                delta.function_call is not None or delta.tool_calls is not None
             ):
                 try:
                     model_response.system_fingerprint = (
@@ -11481,7 +11478,11 @@ class CustomStreamWrapper:
                 model_response.choices[0].delta = Delta()
         else:
             try:
-                delta = dict() if original_chunk.choices[0].delta is None else dict(original_chunk.choices[0].delta)
+                delta = (
+                    dict()
+                    if original_chunk.choices[0].delta is None
+                    else dict(original_chunk.choices[0].delta)
+                )
                 print_verbose(f"original delta: {delta}")
                 model_response.choices[0].delta = Delta(**delta)
                 print_verbose(
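The last hunk reformats the delta conversion without changing behavior: the provider chunk's delta becomes an empty dict when it is None, otherwise a plain dict that is rebuilt as litellm's Delta. A standalone sketch of that pattern; FakeDelta and FakeChoice are stand-ins invented for this example, not litellm or OpenAI types:

    from dataclasses import dataclass, asdict
    from typing import Optional

    @dataclass
    class FakeDelta:
        role: Optional[str] = None
        content: Optional[str] = None

    @dataclass
    class FakeChoice:
        delta: Optional[FakeDelta] = None

    def normalize_delta(choice):
        # same shape as the refactored expression: dict() when delta is None,
        # else the delta's fields as a plain dict
        return dict() if choice.delta is None else asdict(choice.delta)

    assert normalize_delta(FakeChoice()) == {}
    assert normalize_delta(FakeChoice(FakeDelta(content="hi")))["content"] == "hi"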