Allow requiring auth for the /metrics endpoint

This commit is contained in:
Ishaan Jaff 2025-04-04 17:35:02 -07:00
parent 3939ebb990
commit 1260d616dc
5 changed files with 87 additions and 24 deletions

View file

@ -123,6 +123,7 @@ callbacks: List[
langfuse_default_tags: Optional[List[str]] = None
langsmith_batch_size: Optional[int] = None
prometheus_initialize_budget_metrics: Optional[bool] = False
require_auth_for_metrics_endpoint: Optional[bool] = False
argilla_batch_size: Optional[int] = None
datadog_use_v1: Optional[bool] = False # if you want to use v1 datadog logged payload
gcs_pub_sub_use_v1: Optional[

View file

@ -1721,6 +1721,80 @@ class PrometheusLogger(CustomLogger):
return (end_time - start_time).total_seconds()
return None
@staticmethod
def _mount_metrics_endpoint(premium_user: bool):
    """
    Mount the Prometheus /metrics endpoint on the proxy app, choosing the
    authenticated or unauthenticated variant based on litellm settings.

    Args:
        premium_user (bool): Whether the user is a premium user. Non-premium
            users only get a warning logged; the endpoint is still mounted.
    """
    from litellm._logging import verbose_proxy_logger
    from litellm.proxy._types import CommonProxyErrors

    if premium_user is not True:
        verbose_proxy_logger.warning(
            f"Prometheus metrics are only available for premium users. {CommonProxyErrors.not_premium_user.value}"
        )

    if PrometheusLogger._should_init_metrics_with_auth():
        # Config requires auth on /metrics: register behind user_api_key_auth.
        PrometheusLogger._mount_metrics_endpoint_with_auth()
    else:
        # Mount metrics directly without authentication
        PrometheusLogger._mount_metrics_endpoint_without_auth()
@staticmethod
def _mount_metrics_endpoint_without_auth():
    """Expose Prometheus metrics at /metrics with no authentication check."""
    from prometheus_client import make_asgi_app
    from litellm._logging import verbose_proxy_logger
    from litellm.proxy.proxy_server import app

    # Build the prometheus_client ASGI sub-app and attach it directly to the
    # proxy app; requests to /metrics bypass FastAPI dependencies entirely.
    prometheus_asgi_app = make_asgi_app()
    app.mount("/metrics", prometheus_asgi_app)

    verbose_proxy_logger.debug(
        "Starting Prometheus Metrics on /metrics (no authentication)"
    )
@staticmethod
def _mount_metrics_endpoint_with_auth():
    """
    Expose Prometheus metrics at /metrics behind `user_api_key_auth`.

    Registers a FastAPI GET route (instead of mounting a sub-app) so the
    authentication dependency runs on every request.
    """
    from fastapi import APIRouter, Depends, Response
    from prometheus_client import CONTENT_TYPE_LATEST, generate_latest
    from litellm._logging import verbose_proxy_logger
    from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
    from litellm.proxy.proxy_server import app

    # Router carrying the single authenticated metrics endpoint.
    metrics_router = APIRouter()

    @metrics_router.get("/metrics", dependencies=[Depends(user_api_key_auth)])
    async def authenticated_metrics():
        verbose_proxy_logger.debug("Serving authenticated metrics endpoint")
        # BUG FIX: the previous implementation returned the make_asgi_app()
        # object itself, which FastAPI cannot serialize into an HTTP response,
        # so authenticated /metrics never served metrics. Render the current
        # state of the default registry in the Prometheus text format instead.
        return Response(content=generate_latest(), media_type=CONTENT_TYPE_LATEST)

    # Mount the router to the app
    app.include_router(metrics_router)
    verbose_proxy_logger.debug(
        "Starting Prometheus Metrics on /metrics with authentication"
    )
@staticmethod
def _should_init_metrics_with_auth():
    """Return the proxy-level flag that gates authentication on /metrics."""
    require_auth = litellm.require_auth_for_metrics_endpoint
    return require_auth
def prometheus_label_factory(
supported_enum_labels: List[str],

View file

@ -224,18 +224,9 @@ def initialize_callbacks_on_proxy( # noqa: PLR0915
litellm.callbacks = imported_list # type: ignore
if "prometheus" in value:
if premium_user is not True:
verbose_proxy_logger.warning(
f"Prometheus metrics are only available for premium users. {CommonProxyErrors.not_premium_user.value}"
)
from litellm.proxy.proxy_server import app
from litellm.integrations.prometheus import PrometheusLogger
verbose_proxy_logger.debug("Starting Prometheus Metrics on /metrics")
from prometheus_client import make_asgi_app
# Add prometheus asgi middleware to route /metrics requests
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)
PrometheusLogger._mount_metrics_endpoint(premium_user)
else:
litellm.callbacks = [
get_instance_fn(

View file

@ -4,12 +4,12 @@ model_list:
model: openai/fake
api_key: fake-key
api_base: https://exampleopenaiendpoint-production.up.railway.app/
general_settings:
use_redis_transaction_buffer: true
- model_name: openai/gpt-4o
litellm_params:
model: openai/gpt-4o
api_key: fake-key
litellm_settings:
cache: True
cache_params:
type: redis
supported_call_types: []
require_auth_for_metrics_endpoint: true
callbacks: ["prometheus"]
service_callback: ["prometheus_system"]

View file

@ -1676,14 +1676,11 @@ class ProxyConfig:
callback
)
if "prometheus" in callback:
verbose_proxy_logger.debug(
"Starting Prometheus Metrics on /metrics"
from litellm.integrations.prometheus import (
PrometheusLogger,
)
from prometheus_client import make_asgi_app
# Add prometheus asgi middleware to route /metrics requests
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)
PrometheusLogger._mount_metrics_endpoint(premium_user)
print( # noqa
f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}"
) # noqa