forked from phoenix/litellm-mirror
(fix) proxy_server: return errors on proxy
This commit is contained in:
parent
af5685ba5e
commit
b904d513fd
1 changed file with 14 additions and 11 deletions
|
@ -1,6 +1,6 @@
|
||||||
|
|
||||||
import litellm
|
import litellm
|
||||||
from fastapi import FastAPI, Request
|
from fastapi import FastAPI, Request, HTTPException
|
||||||
from fastapi.routing import APIRouter
|
from fastapi.routing import APIRouter
|
||||||
from fastapi.responses import StreamingResponse, FileResponse
|
from fastapi.responses import StreamingResponse, FileResponse
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
@ -58,6 +58,7 @@ async def completion(request: Request):
|
||||||
@router.post("/v1/chat/completions")
|
@router.post("/v1/chat/completions")
|
||||||
@router.post("/chat/completions")
|
@router.post("/chat/completions")
|
||||||
async def chat_completion(request: Request):
|
async def chat_completion(request: Request):
|
||||||
|
try:
|
||||||
data = await request.json()
|
data = await request.json()
|
||||||
|
|
||||||
api_key = request.headers.get("authorization")
|
api_key = request.headers.get("authorization")
|
||||||
|
@ -69,6 +70,8 @@ async def chat_completion(request: Request):
|
||||||
if 'stream' in data and data['stream'] == True: # use generate_responses to stream responses
|
if 'stream' in data and data['stream'] == True: # use generate_responses to stream responses
|
||||||
return StreamingResponse(data_generator(response), media_type='text/event-stream')
|
return StreamingResponse(data_generator(response), media_type='text/event-stream')
|
||||||
return response
|
return response
|
||||||
|
except Exception as e:
|
||||||
|
return HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
@router.get("/")
|
@router.get("/")
|
||||||
async def home(request: Request):
|
async def home(request: Request):
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue