mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
_should_run_auth_on_metrics_endpoint
This commit is contained in:
parent
c7523818b4
commit
96ce5dbf7d
2 changed files with 22 additions and 7 deletions
|
@ -5,6 +5,7 @@ from fastapi import Request
|
|||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
|
||||
import litellm
|
||||
from litellm.proxy._types import SpecialHeaders
|
||||
from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
|
||||
|
||||
|
||||
|
@ -13,12 +14,17 @@ class PrometheusAuthMiddleware(BaseHTTPMiddleware):
|
|||
# Check if this is a request to the metrics endpoint
|
||||
|
||||
if self._is_prometheus_metrics_endpoint(request):
|
||||
try:
|
||||
await user_api_key_auth(
|
||||
request=request, api_key=request.headers.get("Authorization") or ""
|
||||
)
|
||||
except Exception as e:
|
||||
raise e
|
||||
if self._should_run_auth_on_metrics_endpoint() is True:
|
||||
try:
|
||||
await user_api_key_auth(
|
||||
request=request,
|
||||
api_key=request.headers.get(
|
||||
SpecialHeaders.openai_authorization.value
|
||||
)
|
||||
or "",
|
||||
)
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
# Process the request and get the response
|
||||
response = await call_next(request)
|
||||
|
@ -36,4 +42,14 @@ class PrometheusAuthMiddleware(BaseHTTPMiddleware):
|
|||
|
||||
@staticmethod
def _should_run_auth_on_metrics_endpoint():
    """
    Report whether authentication must be enforced for the Prometheus
    ``/metrics`` endpoint.

    Disabled by default; operators opt in through ``proxy_config.yaml``:

    ```yaml
    litellm_settings:
        require_auth_for_metrics_endpoint: true
    ```

    Returns the current value of
    ``litellm.require_auth_for_metrics_endpoint``.
    """
    # Read the module-level flag; equivalent to a plain attribute access.
    return getattr(litellm, "require_auth_for_metrics_endpoint")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue