From 2101029d0d45b4e9dadddbb1a23caa47bedb14d5 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Thu, 30 Nov 2023 20:00:35 -0800
Subject: [PATCH] (feat) proxy: /embedding -> use ORJSON responses

---
 litellm/proxy/proxy_server.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 010bb6cde2..298787a22b 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -94,7 +94,7 @@ litellm.suppress_debug_info = True
 from fastapi import FastAPI, Request, HTTPException, status, Depends
 from fastapi.routing import APIRouter
 from fastapi.encoders import jsonable_encoder
-from fastapi.responses import StreamingResponse, FileResponse
+from fastapi.responses import StreamingResponse, FileResponse, ORJSONResponse
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.security.api_key import APIKeyHeader
 import json
@@ -779,8 +779,8 @@ async def chat_completion(request: Request, model: Optional[str] = None, user_ap
             detail=error_msg
         )

-@router.post("/v1/embeddings", dependencies=[Depends(user_api_key_auth)])
-@router.post("/embeddings", dependencies=[Depends(user_api_key_auth)])
+@router.post("/v1/embeddings", dependencies=[Depends(user_api_key_auth)], response_class=ORJSONResponse)
+@router.post("/embeddings", dependencies=[Depends(user_api_key_auth)], response_class=ORJSONResponse)
 async def embeddings(request: Request, user_api_key_dict: dict = Depends(user_api_key_auth)):
     try:
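
For context, below is a minimal standalone sketch of the pattern this patch applies: passing response_class=ORJSONResponse to a FastAPI route so the handler's return value is serialized with orjson rather than the default JSONResponse encoder. The user_api_key_auth dependency and the embeddings payload here are simplified stand-ins for illustration, not litellm's actual implementation; the real handler lives in litellm/proxy/proxy_server.py as shown in the diff above. Requires fastapi, orjson, and uvicorn to be installed.

# Minimal sketch (not litellm's code): route configured with ORJSONResponse.
from fastapi import Depends, FastAPI, Request
from fastapi.responses import ORJSONResponse

app = FastAPI()


async def user_api_key_auth(request: Request) -> dict:
    # Hypothetical stand-in for litellm's auth dependency: just echo the header.
    return {"api_key": request.headers.get("Authorization", "")}


@app.post("/v1/embeddings", dependencies=[Depends(user_api_key_auth)], response_class=ORJSONResponse)
@app.post("/embeddings", dependencies=[Depends(user_api_key_auth)], response_class=ORJSONResponse)
async def embeddings(request: Request, user_api_key_dict: dict = Depends(user_api_key_auth)):
    # Returning a plain dict: FastAPI hands it to ORJSONResponse, which uses
    # orjson.dumps() for serialization instead of the default json-based encoder.
    return {
        "object": "list",
        "data": [{"object": "embedding", "index": 0, "embedding": [0.0, 0.1]}],
        "model": "stub-model",
    }

The only behavioral change in the patch is this response_class swap on the two /embeddings routes; the handler body itself is untouched.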