From 7d6d6ec582f17fb35f8b2c3521bc75663db1fc0b Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Thu, 26 Oct 2023 18:07:43 -0700
Subject: [PATCH] build(litellm_server/main.py): removing print statements

---
 litellm_server/main.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/litellm_server/main.py b/litellm_server/main.py
index aa0391891..76fd5f048 100644
--- a/litellm_server/main.py
+++ b/litellm_server/main.py
@@ -92,17 +92,12 @@ async def chat_completion(request: Request):
     data = await request.json()
     # default to always using the "ENV" variables, only if AUTH_STRATEGY==DYNAMIC then reads headers
     env_validation = litellm.validate_environment(model=data["model"])
-    print(f"keys_in_environment: {env_validation['keys_in_environment'] is False}")
-    print(f"auth in request headers: {'authorization' in request.headers}")
     if (env_validation['keys_in_environment'] is False or os.getenv("AUTH_STRATEGY", None) == "DYNAMIC") and "authorization" in request.headers: # if users pass LLM api keys as part of header
         api_key = request.headers.get("authorization")
-        print(f"api key from headers: {api_key}")
         api_key = api_key.replace("Bearer", "").strip()
         if len(api_key) > 0:
             api_key = api_key
             data["api_key"] = api_key
-    print(f"final api key: {api_key}")
-    print(f"data going into litellm: {data}")
     response = litellm.completion(
         **data
     )
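
For context, a minimal sketch of how a client would exercise the header-based auth path these prints were debugging: the handler reads the Authorization header, strips the "Bearer " prefix, and forwards the key as data["api_key"] into litellm.completion(**data). The host, port, route, model name, and key below are placeholders, not taken from the patch, and this assumes the server was started with AUTH_STRATEGY=DYNAMIC (or without provider keys in its environment).

# Hypothetical client-side sketch; endpoint URL, model, and key are placeholders.
import requests

response = requests.post(
    "http://localhost:8000/chat/completions",
    headers={
        # Server-side, "Bearer " is stripped and the remainder becomes data["api_key"].
        "Authorization": "Bearer sk-your-provider-key",
        "Content-Type": "application/json",
    },
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Hello!"}],
    },
)
print(response.json())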