forked from phoenix/litellm-mirror
Merge pull request #4739 from BerriAI/litellm_custom_header_master_key
feat - use custom api key header name when using litellm virtual keys
This commit is contained in:
commit
3a57fef448
4 changed files with 148 additions and 3 deletions
|
@ -347,6 +347,70 @@ curl --location 'http://localhost:4000/key/generate' \
|
||||||
"max_budget": 0,}'
|
"max_budget": 0,}'
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Advanced - Pass LiteLLM Key in custom header
|
||||||
|
|
||||||
|
Use this to make LiteLLM proxy look for the virtual key in a custom header instead of the default `"Authorization"` header
|
||||||
|
|
||||||
|
**Step 1** Define `litellm_key_header_name` in your litellm config.yaml
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
model_list:
|
||||||
|
- model_name: fake-openai-endpoint
|
||||||
|
litellm_params:
|
||||||
|
model: openai/fake
|
||||||
|
api_key: fake-key
|
||||||
|
api_base: https://exampleopenaiendpoint-production.up.railway.app/
|
||||||
|
|
||||||
|
general_settings:
|
||||||
|
master_key: sk-1234
|
||||||
|
litellm_key_header_name: "X-Litellm-Key" # 👈 Key Change
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
**Step 2** Test it
|
||||||
|
|
||||||
|
In this request, litellm will use the Virtual key in the `X-Litellm-Key` header
|
||||||
|
|
||||||
|
<Tabs>
|
||||||
|
<TabItem value="curl" label="curl">
|
||||||
|
|
||||||
|
```shell
|
||||||
|
curl http://localhost:4000/v1/chat/completions \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "X-Litellm-Key: Bearer sk-1234" \
|
||||||
|
-H "Authorization: Bearer bad-key" \
|
||||||
|
-d '{
|
||||||
|
"model": "fake-openai-endpoint",
|
||||||
|
"messages": [
|
||||||
|
{"role": "user", "content": "Hello, Claude gm!"}
|
||||||
|
]
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Expected Response**
|
||||||
|
|
||||||
|
Expect to see a successful response from the litellm proxy since the key passed in `X-Litellm-Key` is valid
|
||||||
|
```shell
|
||||||
|
{"id":"chatcmpl-f9b2b79a7c30477ab93cd0e717d1773e","choices":[{"finish_reason":"stop","index":0,"message":{"content":"\n\nHello there, how may I assist you today?","role":"assistant","tool_calls":null,"function_call":null}}],"created":1677652288,"model":"gpt-3.5-turbo-0125","object":"chat.completion","system_fingerprint":"fp_44709d6fcb","usage":{"completion_tokens":12,"prompt_tokens":9,"total_tokens":21}
|
||||||
|
```
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
|
||||||
|
<TabItem value="python" label="OpenAI Python SDK">
|
||||||
|
|
||||||
|
```python
|
||||||
|
client = openai.OpenAI(
|
||||||
|
api_key="not-used",
|
||||||
|
base_url="https://api-gateway-url.com/llmservc/api/litellmp",
|
||||||
|
default_headers={
|
||||||
|
"Authorization": f"Bearer {API_GATEWAY_TOKEN}", # (optional) For your API Gateway
|
||||||
|
"X-Litellm-Key": f"Bearer sk-1234" # For LiteLLM Proxy
|
||||||
|
}
|
||||||
|
)
|
||||||
|
```
|
||||||
|
</TabItem>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
## Advanced - Custom Auth
|
## Advanced - Custom Auth
|
||||||
|
|
||||||
You can now override the default api key auth.
|
You can now override the default api key auth.
|
||||||
|
|
|
@ -123,13 +123,10 @@ async def user_api_key_auth(
|
||||||
if isinstance(api_key, str):
|
if isinstance(api_key, str):
|
||||||
passed_in_key = api_key
|
passed_in_key = api_key
|
||||||
api_key = _get_bearer_token(api_key=api_key)
|
api_key = _get_bearer_token(api_key=api_key)
|
||||||
|
|
||||||
elif isinstance(azure_api_key_header, str):
|
elif isinstance(azure_api_key_header, str):
|
||||||
api_key = azure_api_key_header
|
api_key = azure_api_key_header
|
||||||
|
|
||||||
elif isinstance(anthropic_api_key_header, str):
|
elif isinstance(anthropic_api_key_header, str):
|
||||||
api_key = anthropic_api_key_header
|
api_key = anthropic_api_key_header
|
||||||
|
|
||||||
elif pass_through_endpoints is not None:
|
elif pass_through_endpoints is not None:
|
||||||
for endpoint in pass_through_endpoints:
|
for endpoint in pass_through_endpoints:
|
||||||
if endpoint.get("path", "") == route:
|
if endpoint.get("path", "") == route:
|
||||||
|
@ -138,6 +135,15 @@ async def user_api_key_auth(
|
||||||
header_key: str = headers.get("litellm_user_api_key", "")
|
header_key: str = headers.get("litellm_user_api_key", "")
|
||||||
if request.headers.get(key=header_key) is not None:
|
if request.headers.get(key=header_key) is not None:
|
||||||
api_key = request.headers.get(key=header_key)
|
api_key = request.headers.get(key=header_key)
|
||||||
|
|
||||||
|
# if user wants to pass LiteLLM_Master_Key as a custom header, example pass litellm keys as X-LiteLLM-Key: Bearer sk-1234
|
||||||
|
custom_litellm_key_header_name = general_settings.get("litellm_key_header_name")
|
||||||
|
if custom_litellm_key_header_name is not None:
|
||||||
|
api_key = get_api_key_from_custom_header(
|
||||||
|
request=request,
|
||||||
|
custom_litellm_key_header_name=custom_litellm_key_header_name,
|
||||||
|
)
|
||||||
|
|
||||||
parent_otel_span: Optional[Span] = None
|
parent_otel_span: Optional[Span] = None
|
||||||
if open_telemetry_logger is not None:
|
if open_telemetry_logger is not None:
|
||||||
parent_otel_span = open_telemetry_logger.tracer.start_span(
|
parent_otel_span = open_telemetry_logger.tracer.start_span(
|
||||||
|
@ -1267,3 +1273,27 @@ def _check_valid_ip(allowed_ips: Optional[List[str]], request: Request) -> bool:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def get_api_key_from_custom_header(
    request: Request, custom_litellm_key_header_name: str
):
    """
    Extract the LiteLLM virtual key from a user-configured request header.

    Used when `general_settings.litellm_key_header_name` is set, so the proxy
    reads the virtual key from that header instead of `Authorization`.

    Args:
        request: the incoming request whose headers are searched.
        custom_litellm_key_header_name: header name configured by the user
            (matched case-insensitively; header lookups are lowercased).

    Returns:
        The API key with any "Bearer " prefix stripped via `_get_bearer_token`.

    Raises:
        ValueError: if the configured header is absent or empty on the request.
    """
    # Header names are case-insensitive; normalize once for lookup and logging.
    header_name = custom_litellm_key_header_name.lower()
    verbose_proxy_logger.debug(
        "searching for custom_litellm_key_header_name= %s",
        header_name,
    )

    raw_header_value = request.headers.get(header_name)
    # Guard clause: fail fast when the caller did not supply the custom header.
    if not raw_header_value:
        raise ValueError(
            f"No LiteLLM Virtual Key pass. Please set header={header_name}: Bearer <api_key>"
        )

    api_key = _get_bearer_token(api_key=raw_header_value)
    verbose_proxy_logger.debug(
        "Found custom API key using header: {}, setting api_key={}".format(
            header_name, api_key
        )
    )
    return api_key
|
||||||
|
|
|
@ -17,6 +17,7 @@ model_list:
|
||||||
|
|
||||||
general_settings:
|
general_settings:
|
||||||
master_key: sk-1234
|
master_key: sk-1234
|
||||||
|
litellm_key_header_name: "X-Litellm-Key"
|
||||||
|
|
||||||
litellm_settings:
|
litellm_settings:
|
||||||
cache: true
|
cache: true
|
||||||
|
|
|
@ -2545,3 +2545,53 @@ async def test_update_user_role(prisma_client):
|
||||||
# use generated key to auth in
|
# use generated key to auth in
|
||||||
result = await user_api_key_auth(request=request, api_key=api_key)
|
result = await user_api_key_auth(request=request, api_key=api_key)
|
||||||
print("result from user auth with new key", result)
|
print("result from user auth with new key", result)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_custom_api_key_header_name(prisma_client):
|
||||||
|
""" """
|
||||||
|
setattr(litellm.proxy.proxy_server, "prisma_client", prisma_client)
|
||||||
|
setattr(litellm.proxy.proxy_server, "master_key", "sk-1234")
|
||||||
|
setattr(
|
||||||
|
litellm.proxy.proxy_server,
|
||||||
|
"general_settings",
|
||||||
|
{"litellm_key_header_name": "x-litellm-key"},
|
||||||
|
)
|
||||||
|
await litellm.proxy.proxy_server.prisma_client.connect()
|
||||||
|
|
||||||
|
api_route = APIRoute(path="/chat/completions", endpoint=chat_completion)
|
||||||
|
request = Request(
|
||||||
|
{
|
||||||
|
"type": "http",
|
||||||
|
"route": api_route,
|
||||||
|
"path": api_route.path,
|
||||||
|
"headers": [
|
||||||
|
(b"x-litellm-key", b"Bearer sk-1234"),
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# this should pass because we pass the master key as X-Litellm-Key and litellm_key_header_name="X-Litellm-Key" in general settings
|
||||||
|
result = await user_api_key_auth(request=request, api_key="Bearer invalid-key")
|
||||||
|
|
||||||
|
# this should fail because X-Litellm-Key is invalid
|
||||||
|
request = Request(
|
||||||
|
{
|
||||||
|
"type": "http",
|
||||||
|
"route": api_route,
|
||||||
|
"path": api_route.path,
|
||||||
|
"headers": [],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
result = await user_api_key_auth(request=request, api_key="Bearer sk-1234")
|
||||||
|
pytest.fail(f"This should have failed!. invalid Auth on this request")
|
||||||
|
except Exception as e:
|
||||||
|
print("failed with error", e)
|
||||||
|
assert (
|
||||||
|
"No LiteLLM Virtual Key pass. Please set header=x-litellm-key: Bearer <api_key>"
|
||||||
|
in e.message
|
||||||
|
)
|
||||||
|
pass
|
||||||
|
|
||||||
|
# this should pass because X-Litellm-Key is valid
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue