forked from phoenix/litellm-mirror
(fix) fix server errors in previous commit
This commit is contained in:
parent
fbac910436
commit
e9ac3ca130
1 changed file with 1 addition and 22 deletions
|
@@ -1,5 +1,5 @@
|
||||||
import litellm, os
|
import litellm, os
|
||||||
from fastapi import FastAPI, Request
|
from fastapi import FastAPI, Request, HTTPException
|
||||||
from fastapi.routing import APIRouter
|
from fastapi.routing import APIRouter
|
||||||
from fastapi.responses import StreamingResponse, FileResponse
|
from fastapi.responses import StreamingResponse, FileResponse
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
@@ -60,7 +60,6 @@ async def completion(request: Request):
|
||||||
@router.post("/v1/chat/completions")
|
@router.post("/v1/chat/completions")
|
||||||
@router.post("/chat/completions")
|
@router.post("/chat/completions")
|
||||||
async def chat_completion(request: Request):
|
async def chat_completion(request: Request):
|
||||||
<<<<<<< HEAD
|
|
||||||
try:
|
try:
|
||||||
data = await request.json()
|
data = await request.json()
|
||||||
if "authorization" in request.headers: # if users pass LLM api keys as part of header
|
if "authorization" in request.headers: # if users pass LLM api keys as part of header
|
||||||
|
@@ -75,26 +74,6 @@ async def chat_completion(request: Request):
|
||||||
return response
|
return response
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return HTTPException(status_code=500, detail=str(e))
|
return HTTPException(status_code=500, detail=str(e))
|
||||||
=======
|
|
||||||
data = await request.json()
|
|
||||||
|
|
||||||
api_key = request.headers.get("authorization")
|
|
||||||
api_key = api_key.split(" ")[1]
|
|
||||||
## check for special character - '|' <- used for bedrock (aws_access_key + "|" + aws_secret_access_key + "|" + aws_region_name)
|
|
||||||
if "|" in api_key: ## BEDROCK
|
|
||||||
aws_keys = api_key.split("|")
|
|
||||||
data["aws_access_key_id"] = aws_keys[0]
|
|
||||||
data["aws_secret_access_key"] = aws_keys[1]
|
|
||||||
data["aws_region_name"] = aws_keys[2]
|
|
||||||
else: ## ALL OTHER PROVIDERS
|
|
||||||
data["api_key"] = api_key
|
|
||||||
response = litellm.completion(
|
|
||||||
**data
|
|
||||||
)
|
|
||||||
if 'stream' in data and data['stream'] == True: # use generate_responses to stream responses
|
|
||||||
return StreamingResponse(data_generator(response), media_type='text/event-stream')
|
|
||||||
return response
|
|
||||||
>>>>>>> 968b835 (fix(openai-proxy): adding langfuse)
|
|
||||||
|
|
||||||
@router.get("/")
|
@router.get("/")
|
||||||
async def home(request: Request):
|
async def home(request: Request):
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue