From e96a60893860623e1fa7e32285a6f4de77e50533 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 4 Dec 2023 10:48:59 -0800
Subject: [PATCH] (feat) proxy: set custom headers in metadata

---
 litellm/proxy/proxy_server.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 9ebafe2dd..1238fd8e2 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -923,9 +923,10 @@ async def chat_completion(request: Request, model: Optional[str] = None, user_ap
         if "metadata" in data:
             data["metadata"]["user_api_key"] = user_api_key_dict["api_key"]
+            data["metadata"]["headers"] = request.headers
         else:
             data["metadata"] = {"user_api_key": user_api_key_dict["api_key"]}
-
+            data["metadata"]["headers"] = request.headers
         global user_temperature, user_request_timeout, user_max_tokens, user_api_base
         # override with user settings, these are params passed via cli
         if user_temperature:
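
Not part of the patch itself: below is a minimal, self-contained sketch of the metadata population logic the two added lines implement, so the resulting shape of data["metadata"] is easy to see. The helper name attach_request_context and the plain-dict stand-in for Starlette's request.headers are illustrative assumptions, not litellm API.

# Illustrative sketch only (not from the patch): mirrors the two added lines,
# using a plain dict in place of Starlette's `request.headers` mapping.

def attach_request_context(data: dict, api_key: str, headers: dict) -> dict:
    """Ensure data["metadata"] exists, then record the caller's API key and
    the incoming request headers, as the patched chat_completion route does."""
    if "metadata" in data:
        data["metadata"]["user_api_key"] = api_key
        data["metadata"]["headers"] = headers
    else:
        data["metadata"] = {"user_api_key": api_key}
        data["metadata"]["headers"] = headers
    return data


if __name__ == "__main__":
    body = {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hi"}]}
    headers = {"x-custom-tag": "prod-search", "authorization": "Bearer sk-..."}
    print(attach_request_context(body, "example-api-key", headers)["metadata"])
    # -> {'user_api_key': 'example-api-key', 'headers': {'x-custom-tag': 'prod-search', ...}}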