From 2b889b83b35ed02e498933f98812828127feeb55 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Fri, 26 Jul 2024 18:06:00 -0700
Subject: [PATCH] fix /v1/batches POST

---
 litellm/proxy/proxy_server.py | 28 ++++++++++++++++++++++------
 1 file changed, 22 insertions(+), 6 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 1f35a06f0a..1ec2b38149 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -4808,10 +4808,18 @@ async def create_batch(
     """
     global proxy_logging_obj
     data: Dict = {}
+
     try:
-        # Use orjson to parse JSON data, orjson speeds up requests significantly
-        form_data = await request.form()
-        data = {key: value for key, value in form_data.items() if key != "file"}
+        body = await request.body()
+        body_str = body.decode()
+        try:
+            data = ast.literal_eval(body_str)
+        except:
+            data = json.loads(body_str)
+
+        verbose_proxy_logger.debug(
+            "Request received by LiteLLM:\n{}".format(json.dumps(data, indent=4)),
+        )
 
         # Include original request and headers in the data
         data = await add_litellm_data_to_request(
@@ -4915,10 +4923,18 @@ async def retrieve_batch(
     """
     global proxy_logging_obj
     data: Dict = {}
+    data = {}
     try:
-        # Use orjson to parse JSON data, orjson speeds up requests significantly
-        form_data = await request.form()
-        data = {key: value for key, value in form_data.items() if key != "file"}
+        body = await request.body()
+        body_str = body.decode()
+        try:
+            data = ast.literal_eval(body_str)
+        except:
+            data = json.loads(body_str)
+
+        verbose_proxy_logger.debug(
+            "Request received by LiteLLM:\n{}".format(json.dumps(data, indent=4)),
+        )
 
         # Include original request and headers in the data
         data = await add_litellm_data_to_request(
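
Note: below is a minimal standalone sketch of the parsing pattern these hunks switch to — read the raw request body instead of form data, try ast.literal_eval first, and fall back to json.loads. It assumes a FastAPI/Starlette app; the app object, endpoint path, and helper name are illustrative and not taken from the LiteLLM proxy itself.

# Sketch of the raw-body parsing fallback introduced by this patch (hypothetical
# endpoint; only the parsing logic mirrors the diff above).
import ast
import json
from typing import Any, Dict

from fastapi import FastAPI, Request

app = FastAPI()


def parse_request_body(body_str: str) -> Dict[str, Any]:
    """Parse a body that may be a Python dict literal or JSON."""
    try:
        # Accepts Python-literal payloads (e.g. single-quoted keys).
        return ast.literal_eval(body_str)
    except (ValueError, SyntaxError):
        # Fall back to strict JSON parsing.
        return json.loads(body_str)


@app.post("/v1/batches")
async def create_batch(request: Request) -> Dict[str, Any]:
    # Read raw bytes rather than request.form(), as the patch does.
    body = await request.body()
    data = parse_request_body(body.decode())
    return {"received": data}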