(feat) proxy - introduce OpenAIException class

This commit is contained in:
ishaan-jaff 2024-01-15 09:52:23 -08:00
parent ad06b08a5e
commit 9716108c80

View file

@ -94,7 +94,12 @@ from fastapi import (
from fastapi.routing import APIRouter
from fastapi.security import OAuth2PasswordBearer
from fastapi.encoders import jsonable_encoder
from fastapi.responses import StreamingResponse, FileResponse, ORJSONResponse
from fastapi.responses import (
StreamingResponse,
FileResponse,
ORJSONResponse,
JSONResponse,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.security.api_key import APIKeyHeader
import json
@ -106,6 +111,39 @@ app = FastAPI(
title="LiteLLM API",
description="Proxy Server to call 100+ LLMs in the OpenAI format\n\nAdmin Panel on [https://dashboard.litellm.ai/admin](https://dashboard.litellm.ai/admin)",
)
class OpenAIException(Exception):
    """OpenAI-API-compatible error carrying the fields of an OpenAI error body.

    Raised by route handlers; a registered FastAPI exception handler converts
    it into a JSON response shaped like OpenAI's ``{"error": {...}}`` payload.

    Args:
        message: Human-readable error description.
        type: OpenAI error type string (e.g. ``"invalid_request_error"``).
        param: Name of the offending request parameter, if any.
        code: HTTP status code to return, if known (falls back to 500
            in the exception handler when ``None``).
    """

    def __init__(
        self,
        message: str,
        type: str,
        param: Optional[str],
        code: Optional[int],
    ):
        # Pass the message to Exception.__init__ so str(exc), repr(exc),
        # and logged tracebacks show it instead of an empty string.
        super().__init__(message)
        self.message = message
        self.type = type
        self.param = param
        self.code = code
@app.exception_handler(OpenAIException)
async def openai_exception_handler(request: Request, exc: OpenAIException):
    """Render an OpenAIException as an OpenAI-style JSON error response.

    Builds a ``{"error": {...}}`` body from the exception's fields and uses
    its ``code`` as the HTTP status, defaulting to 500 when absent.
    """
    # Fall back to HTTP 500 when the exception carries no status code.
    if exc.code:
        http_status = int(exc.code)
    else:
        http_status = status.HTTP_500_INTERNAL_SERVER_ERROR
    error_body = {
        "message": exc.message,
        "type": exc.type,
        "param": exc.param,
        "code": exc.code,
    }
    return JSONResponse(status_code=http_status, content={"error": error_body})
router = APIRouter()
origins = ["*"]
@ -1611,11 +1649,13 @@ async def chat_completion(
else:
error_traceback = traceback.format_exc()
error_msg = f"{str(e)}\n\n{error_traceback}"
try:
status = e.status_code # type: ignore
except:
status = 500
raise HTTPException(status_code=status, detail=error_msg)
raise OpenAIException(
message=getattr(e, "message", error_msg),
type=getattr(e, "type", "None"),
param=getattr(e, "param", "None"),
code=getattr(e, "status_code", 500),
)
@router.post(