fix(add-custom-success-callback-for-streaming): add custom success callback for streaming

This commit is contained in:
Krrish Dholakia 2023-10-06 15:01:50 -07:00
parent 868c1c594f
commit 7e34736a38
8 changed files with 89 additions and 20 deletions

View file

@@ -18,10 +18,12 @@ print()
import litellm
from fastapi import FastAPI, Request
from fastapi.routing import APIRouter
from fastapi.responses import StreamingResponse
import json
app = FastAPI()
router = APIRouter()
user_api_base = None
user_model = None
@@ -109,14 +111,14 @@ def data_generator(response):
yield f"data: {json.dumps(chunk)}\n\n"
#### API ENDPOINTS ####
@app.get("/models") # if project requires model list
@router.get("/models") # if project requires model list
def model_list():
    """Expose the single configured model in the OpenAI `/models` list format.

    Returns a dict shaped like OpenAI's model-list response so OpenAI-compatible
    clients that probe `/models` can discover the proxied model. `user_model` is
    a module-level global set elsewhere in this file.
    """
    # NOTE(review): `created` is a fixed placeholder timestamp, not the real
    # model creation time — presumably copied from OpenAI example payloads.
    model_entry = {
        "id": user_model,
        "object": "model",
        "created": 1677610602,
        "owned_by": "openai",
    }
    return {"data": [model_entry], "object": "list"}
@app.post("/{version}/completions")
@router.post("/completions")
async def completion(request: Request):
data = await request.json()
print_verbose(f"data passed in: {data}")
@@ -149,7 +151,7 @@ async def completion(request: Request):
return StreamingResponse(data_generator(response), media_type='text/event-stream')
return response
@app.post("/chat/completions")
@router.post("/chat/completions")
async def chat_completion(request: Request):
data = await request.json()
print_verbose(f"data passed in: {data}")
@@ -186,4 +188,6 @@ async def chat_completion(request: Request):
if 'stream' in data and data['stream'] == True: # use generate_responses to stream responses
return StreamingResponse(data_generator(response), media_type='text/event-stream')
print_verbose(f"response: {response}")
return response
return response
app.include_router(router)