feat(litellm_pre_call_utils.py): support 'add_user_information_to_llm_headers' param (#6390)

* feat(litellm_pre_call_utils.py): support 'add_user_information_to_llm_headers' param

enables passing user info to the backend LLM (a user-requested feature for custom vLLM servers)

* fix(litellm_logging.py): fix linting error
Krish Dholakia 2024-10-24 22:03:16 -07:00 committed by GitHub
parent 4e310051c7
commit 9fccf829b1
6 changed files with 221 additions and 73 deletions
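
The gist of the new flag, as exercised by the test added at the bottom of this diff (a minimal sketch; the key, user, and org values are the placeholders used in that test):

    import litellm
    from litellm.proxy._types import UserAPIKeyAuth
    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup

    # Opt in: forward the calling user's metadata to the backend LLM as headers.
    litellm.add_user_information_to_llm_headers = True

    user = UserAPIKeyAuth(
        api_key="test_api_key", user_id="test_user_id", org_id="test_org_id"
    )
    llm_headers = LiteLLMProxyRequestSetup.add_headers_to_llm_call(
        headers={},
        user_api_key_dict=user,
    )
    # llm_headers now carries the user info:
    # {
    #     "x-litellm-user_api_key_user_id": "test_user_id",
    #     "x-litellm-user_api_key_org_id": "test_org_id",
    #     "x-litellm-user_api_key_hash": "test_api_key",
    # }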

@@ -203,7 +203,7 @@ def test_add_headers_to_request(litellm_key_header_name):
     import json
     from litellm.proxy.litellm_pre_call_utils import (
         clean_headers,
-        get_forwardable_headers,
+        LiteLLMProxyRequestSetup,
     )
     headers = {
@@ -215,7 +215,9 @@ def test_add_headers_to_request(litellm_key_header_name):
     request._url = URL(url="/chat/completions")
     request._body = json.dumps({"model": "gpt-3.5-turbo"}).encode("utf-8")
     request_headers = clean_headers(headers, litellm_key_header_name)
-    forwarded_headers = get_forwardable_headers(request_headers)
+    forwarded_headers = LiteLLMProxyRequestSetup._get_forwardable_headers(
+        request_headers
+    )
     assert forwarded_headers == {"X-Custom-Header": "Custom-Value"}
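
The pattern across these test updates: the former module-level helpers in litellm_pre_call_utils (get_forwardable_headers, get_openai_org_id_from_headers, add_litellm_data_for_backend_llm_call) now live as static methods on the LiteLLMProxyRequestSetup class. A before/after sketch of the call-site change (the input dict is a stand-in; the output matches the assertion in the hunk above):

    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup

    request_headers = {"X-Custom-Header": "Custom-Value"}

    # before: forwarded = get_forwardable_headers(request_headers)
    # after:  same logic, now a static method on the setup class
    forwarded = LiteLLMProxyRequestSetup._get_forwardable_headers(request_headers)
    # forwarded == {"X-Custom-Header": "Custom-Value"}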

@@ -371,12 +371,12 @@ def test_is_request_body_safe_model_enabled(
 def test_reading_openai_org_id_from_headers():
-    from litellm.proxy.litellm_pre_call_utils import get_openai_org_id_from_headers
+    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup
     headers = {
         "OpenAI-Organization": "test_org_id",
     }
-    org_id = get_openai_org_id_from_headers(headers)
+    org_id = LiteLLMProxyRequestSetup.get_openai_org_id_from_headers(headers)
     assert org_id == "test_org_id"
@@ -399,11 +399,44 @@ def test_reading_openai_org_id_from_headers():
 )
 def test_add_litellm_data_for_backend_llm_call(headers, expected_data):
     import json
-    from litellm.proxy.litellm_pre_call_utils import (
-        add_litellm_data_for_backend_llm_call,
-    )
+    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup
+    from litellm.proxy._types import UserAPIKeyAuth
+
+    user_api_key_dict = UserAPIKeyAuth(
+        api_key="test_api_key", user_id="test_user_id", org_id="test_org_id"
+    )
 
-    data = add_litellm_data_for_backend_llm_call(headers)
+    data = LiteLLMProxyRequestSetup.add_litellm_data_for_backend_llm_call(
+        headers=headers,
+        user_api_key_dict=user_api_key_dict,
+        general_settings=None,
+    )
     assert json.dumps(data, sort_keys=True) == json.dumps(expected_data, sort_keys=True)
+
+
+def test_foward_litellm_user_info_to_backend_llm_call():
+    import json
+
+    litellm.add_user_information_to_llm_headers = True
+
+    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup
+    from litellm.proxy._types import UserAPIKeyAuth
+
+    user_api_key_dict = UserAPIKeyAuth(
+        api_key="test_api_key", user_id="test_user_id", org_id="test_org_id"
+    )
+
+    data = LiteLLMProxyRequestSetup.add_headers_to_llm_call(
+        headers={},
+        user_api_key_dict=user_api_key_dict,
+    )
+
+    expected_data = {
+        "x-litellm-user_api_key_user_id": "test_user_id",
+        "x-litellm-user_api_key_org_id": "test_org_id",
+        "x-litellm-user_api_key_hash": "test_api_key",
+    }
+
+    assert json.dumps(data, sort_keys=True) == json.dumps(expected_data, sort_keys=True)