Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
fix(proxy_server.py): fix returning response headers on exception
parent 17635450cd
commit e0c2940f22

1 changed file with 2 additions and 10 deletions
litellm/proxy/proxy_server.py

@@ -329,6 +329,7 @@ class UserAPIKeyCacheTTLEnum(enum.Enum):
 @app.exception_handler(ProxyException)
 async def openai_exception_handler(request: Request, exc: ProxyException):
     # NOTE: DO NOT MODIFY THIS, its crucial to map to Openai exceptions
+    headers = exc.headers
     return JSONResponse(
         status_code=(
             int(exc.code) if exc.code else status.HTTP_500_INTERNAL_SERVER_ERROR
@@ -341,6 +342,7 @@ async def openai_exception_handler(request: Request, exc: ProxyException):
                 "code": exc.code,
             }
         },
+        headers=headers,
     )
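Taken together, the two added lines forward whatever headers the raised ProxyException carries (for example rate-limit headers from an upstream provider) onto the JSON error response instead of dropping them. Below is a minimal, self-contained sketch of the pattern; the ProxyException stand-in is hypothetical, with field names mirroring the diff above, and the /boom route and its values are illustrative only, not LiteLLM code:

from fastapi import FastAPI, Request, status
from fastapi.responses import JSONResponse

app = FastAPI()


class ProxyException(Exception):
    # Hypothetical stand-in for LiteLLM's ProxyException; the field names
    # (message/type/param/code/headers) mirror the diff, the class is assumed.
    def __init__(self, message, type, param, code, headers=None):
        self.message = message
        self.type = type
        self.param = param
        self.code = code
        self.headers = headers or {}


@app.exception_handler(ProxyException)
async def openai_exception_handler(request: Request, exc: ProxyException):
    headers = exc.headers  # first added line: pull the headers off the exception
    return JSONResponse(
        status_code=(
            int(exc.code) if exc.code else status.HTTP_500_INTERNAL_SERVER_ERROR
        ),
        content={
            "error": {
                "message": exc.message,
                "type": exc.type,
                "param": exc.param,
                "code": exc.code,
            }
        },
        headers=headers,  # second added line: return them to the client
    )


@app.get("/boom")
async def boom():
    raise ProxyException(
        message="Rate limit exceeded",
        type="rate_limit_error",
        param=None,
        code="429",
        headers={"retry-after": "30"},
    )

Hitting /boom with fastapi.testclient.TestClient yields a 429 whose response carries the retry-after header alongside the OpenAI-style error body; without headers=headers, the header would be silently lost.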
@@ -2710,16 +2712,6 @@ def model_list(
     )
 
 
-@app.exception_handler(ProxyException)
-async def proxy_exception_handler(request: Request, exc: ProxyException):
-    headers = exc.headers
-    return JSONResponse(
-        status_code=exc.code or 400,
-        content=exc.to_dict(),
-        headers=headers,
-    )
-
-
 @router.post(
     "/v1/chat/completions",
     dependencies=[Depends(user_api_key_auth)],
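The deleted proxy_exception_handler was a second @app.exception_handler(ProxyException) registration. Starlette stores exception handlers in a dict keyed by exception class, so a later registration silently replaces an earlier one: the duplicate, which passed exc.code (potentially a string) straight to status_code without the int(...) coercion, was the handler actually running. A small sketch of that override behavior, using a toy exception rather than LiteLLM's real one:

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from fastapi.testclient import TestClient


class ToyError(Exception):
    """Illustrative exception; stands in for ProxyException."""


app = FastAPI()


@app.exception_handler(ToyError)
async def first_handler(request: Request, exc: ToyError):
    return JSONResponse(status_code=500, content={"handler": "first"})


@app.exception_handler(ToyError)  # same dict key: replaces first_handler
async def second_handler(request: Request, exc: ToyError):
    return JSONResponse(status_code=500, content={"handler": "second"})


@app.get("/boom")
async def boom():
    raise ToyError()


client = TestClient(app)
assert client.get("/boom").json() == {"handler": "second"}

Removing the duplicate therefore leaves a single ProxyException handler: the OpenAI-compatible one, which both coerces the status code and, after this commit, returns the exception's headers.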