refactor: add black formatting

Krrish Dholakia 2023-12-25 14:10:38 +05:30
parent b87d630b0a
commit 4905929de3
156 changed files with 19723 additions and 10869 deletions


@@ -16,11 +16,11 @@ from openai import (
     RateLimitError,
     APIStatusError,
     OpenAIError,
-    APIError,
-    APITimeoutError,
-    APIConnectionError,
+    APIError,
+    APITimeoutError,
+    APIConnectionError,
     APIResponseValidationError,
-    UnprocessableEntityError
+    UnprocessableEntityError,
 )
 import httpx
@@ -32,11 +32,10 @@ class AuthenticationError(AuthenticationError): # type: ignore
         self.llm_provider = llm_provider
         self.model = model
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs

+
 # raise when invalid models passed, example gpt-8
 class NotFoundError(NotFoundError): # type: ignore
     def __init__(self, message, model, llm_provider, response: httpx.Response):
@@ -45,9 +44,7 @@ class NotFoundError(NotFoundError): # type: ignore
         self.model = model
         self.llm_provider = llm_provider
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs
@@ -58,23 +55,21 @@ class BadRequestError(BadRequestError): # type: ignore
         self.model = model
         self.llm_provider = llm_provider
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs

-class UnprocessableEntityError(UnprocessableEntityError): # type: ignore
+
+class UnprocessableEntityError(UnprocessableEntityError): # type: ignore
     def __init__(self, message, model, llm_provider, response: httpx.Response):
         self.status_code = 422
         self.message = message
         self.model = model
         self.llm_provider = llm_provider
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs

+
 class Timeout(APITimeoutError): # type: ignore
     def __init__(self, message, model, llm_provider):
         self.status_code = 408
@@ -86,6 +81,7 @@ class Timeout(APITimeoutError): # type: ignore
             request=request
         ) # Call the base class constructor with the parameters it needs

+
 class RateLimitError(RateLimitError): # type: ignore
     def __init__(self, message, llm_provider, model, response: httpx.Response):
         self.status_code = 429
@@ -93,11 +89,10 @@ class RateLimitError(RateLimitError): # type: ignore
         self.llm_provider = llm_provider
         self.modle = model
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs

+
 # sub class of rate limit error - meant to give more granularity for error handling context window exceeded errors
 class ContextWindowExceededError(BadRequestError): # type: ignore
     def __init__(self, message, model, llm_provider, response: httpx.Response):
@@ -106,12 +101,13 @@ class ContextWindowExceededError(BadRequestError): # type: ignore
         self.model = model
         self.llm_provider = llm_provider
         super().__init__(
-            message=self.message,
-            model=self.model, # type: ignore
-            llm_provider=self.llm_provider, # type: ignore
-            response=response
+            message=self.message,
+            model=self.model, # type: ignore
+            llm_provider=self.llm_provider, # type: ignore
+            response=response,
         ) # Call the base class constructor with the parameters it needs

+
 class ServiceUnavailableError(APIStatusError): # type: ignore
     def __init__(self, message, llm_provider, model, response: httpx.Response):
         self.status_code = 503
@@ -119,50 +115,42 @@ class ServiceUnavailableError(APIStatusError): # type: ignore
         self.llm_provider = llm_provider
         self.model = model
         super().__init__(
-            self.message,
-            response=response,
-            body=None
+            self.message, response=response, body=None
         ) # Call the base class constructor with the parameters it needs

+
 # raise this when the API returns an invalid response object - https://github.com/openai/openai-python/blob/1be14ee34a0f8e42d3f9aa5451aa4cb161f1781f/openai/api_requestor.py#L401
-class APIError(APIError): # type: ignore
-    def __init__(self, status_code, message, llm_provider, model, request: httpx.Request):
-        self.status_code = status_code
+class APIError(APIError): # type: ignore
+    def __init__(
+        self, status_code, message, llm_provider, model, request: httpx.Request
+    ):
+        self.status_code = status_code
         self.message = message
         self.llm_provider = llm_provider
         self.model = model
-        super().__init__(
-            self.message,
-            request=request, # type: ignore
-            body=None
-        )
+        super().__init__(self.message, request=request, body=None) # type: ignore

+
 # raised if an invalid request (not get, delete, put, post) is made
-class APIConnectionError(APIConnectionError): # type: ignore
+class APIConnectionError(APIConnectionError): # type: ignore
     def __init__(self, message, llm_provider, model, request: httpx.Request):
         self.message = message
         self.llm_provider = llm_provider
         self.model = model
         self.status_code = 500
-        super().__init__(
-            message=self.message,
-            request=request
-        )
+        super().__init__(message=self.message, request=request)

+
 # raised if an invalid request (not get, delete, put, post) is made
-class APIResponseValidationError(APIResponseValidationError): # type: ignore
+class APIResponseValidationError(APIResponseValidationError): # type: ignore
     def __init__(self, message, llm_provider, model):
         self.message = message
         self.llm_provider = llm_provider
         self.model = model
         request = httpx.Request(method="POST", url="https://api.openai.com/v1")
         response = httpx.Response(status_code=500, request=request)
-        super().__init__(
-            response=response,
-            body=None,
-            message=message
-        )
+        super().__init__(response=response, body=None, message=message)


 class OpenAIError(OpenAIError): # type: ignore
     def __init__(self, original_exception):
@@ -176,6 +164,7 @@ class OpenAIError(OpenAIError): # type: ignore
         )
         self.llm_provider = "openai"

+
 class BudgetExceededError(Exception):
     def __init__(self, current_cost, max_budget):
         self.current_cost = current_cost
@@ -183,7 +172,8 @@ class BudgetExceededError(Exception):
         message = f"Budget has been exceeded! Current cost: {current_cost}, Max budget: {max_budget}"
         super().__init__(message)

-## DEPRECATED ##
+
+## DEPRECATED ##
 class InvalidRequestError(BadRequestError): # type: ignore
     def __init__(self, message, model, llm_provider):
         self.status_code = 400
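
The classes touched by this diff are LiteLLM's provider-agnostic wrappers around the OpenAI SDK exception types, so calling code can handle errors the same way regardless of provider. A minimal usage sketch follows, assuming these classes are re-exported through litellm.exceptions and raised by litellm.completion when a provider call fails; the model name and handlers are illustrative only, not part of this commit.

# Illustrative sketch only; assumes litellm.exceptions exposes the classes above.
from litellm import completion
from litellm.exceptions import (
    ContextWindowExceededError,
    RateLimitError,
    ServiceUnavailableError,
)

try:
    # Hypothetical call; any provider/model supported by litellm could go here
    resp = completion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "hello"}],
    )
except ContextWindowExceededError as e:
    # Subclasses BadRequestError above, so a BadRequestError handler would also catch it
    print(f"Prompt too large for {e.model} ({e.llm_provider}): {e.message}")
except RateLimitError as e:
    print(f"Rate limited by {e.llm_provider}: {e.message}")
except ServiceUnavailableError as e:
    print(f"{e.llm_provider} unavailable ({e.status_code}): {e.message}")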