From cb20554c79e29fc2dd5fcebc46b545db0605da0f Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Thu, 2 Nov 2023 07:07:38 -0700
Subject: [PATCH] fix(proxy_server.py): accept single quote json body

---
 litellm/proxy/proxy_server.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 867853dcec..603d5b6d26 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -1,5 +1,5 @@
 import sys, os, platform, time, copy
-import threading
+import threading, ast
 import shutil, random, traceback, requests
 
 messages: list = []
@@ -508,7 +508,9 @@ def model_list():
 @router.post("/completions")
 @router.post("/engines/{model:path}/completions")
 async def completion(request: Request):
-    data = await request.json()
+    body = await request.body()
+    body = body.decode()
+    data = ast.literal_eval(body)
     return litellm_completion(data=data, type="completion", user_model=user_model, user_temperature=user_temperature,
                               user_max_tokens=user_max_tokens, user_api_base=user_api_base, user_headers=user_headers,
                               user_debug=user_debug, model_router=model_router, user_request_timeout=user_request_timeout)
@@ -517,7 +519,9 @@ async def completion(request: Request):
 @router.post("/v1/chat/completions")
 @router.post("/chat/completions")
 async def chat_completion(request: Request):
-    data = await request.json()
+    body = await request.body()
+    body = body.decode()
+    data = ast.literal_eval(body)
     print_verbose(f"data passed in: {data}")
     return litellm_completion(data, type="chat_completion", user_model=user_model, user_temperature=user_temperature,
                               user_max_tokens=user_max_tokens,