Mirror of https://github.com/BerriAI/litellm.git
allow requiring auth for /metrics endpoint
Commit 1260d616dc (parent 3939ebb990)
5 changed files with 87 additions and 24 deletions
@@ -123,6 +123,7 @@ callbacks: List[
 langfuse_default_tags: Optional[List[str]] = None
 langsmith_batch_size: Optional[int] = None
 prometheus_initialize_budget_metrics: Optional[bool] = False
+require_auth_for_metrics_endpoint: Optional[bool] = False
 argilla_batch_size: Optional[int] = None
 datadog_use_v1: Optional[bool] = False # if you want to use v1 datadog logged payload
 gcs_pub_sub_use_v1: Optional[
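
The new flag defaults to False, so existing deployments keep an unauthenticated /metrics mount. The PrometheusLogger changes below read it via litellm.require_auth_for_metrics_endpoint, which means it can also be flipped programmatically when embedding litellm instead of going through the proxy config (shown in the last hunk). A minimal sketch, assuming only what this diff adds:

    import litellm

    # Default after this change: /metrics is mounted without authentication.
    print(litellm.require_auth_for_metrics_endpoint)  # False

    # Opt in to the authenticated /metrics route before the proxy mounts Prometheus.
    litellm.require_auth_for_metrics_endpoint = True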
@@ -1721,6 +1721,80 @@ class PrometheusLogger(CustomLogger):
             return (end_time - start_time).total_seconds()
         return None
 
+    @staticmethod
+    def _mount_metrics_endpoint(premium_user: bool):
+        """
+        Mount the Prometheus metrics endpoint with optional authentication.
+
+        Args:
+            premium_user (bool): Whether the user is a premium user
+        """
+        from prometheus_client import make_asgi_app
+
+        from litellm._logging import verbose_proxy_logger
+        from litellm.proxy._types import CommonProxyErrors
+        from litellm.proxy.proxy_server import app
+
+        if premium_user is not True:
+            verbose_proxy_logger.warning(
+                f"Prometheus metrics are only available for premium users. {CommonProxyErrors.not_premium_user.value}"
+            )
+
+        if PrometheusLogger._should_init_metrics_with_auth():
+            PrometheusLogger._mount_metrics_endpoint_with_auth()
+        else:
+            # Mount metrics directly without authentication
+            PrometheusLogger._mount_metrics_endpoint_without_auth()
+
+    @staticmethod
+    def _mount_metrics_endpoint_without_auth():
+        from prometheus_client import make_asgi_app
+
+        from litellm._logging import verbose_proxy_logger
+        from litellm.proxy.proxy_server import app
+
+        # Create metrics ASGI app
+        metrics_app = make_asgi_app()
+
+        # Mount the metrics app to the app
+        app.mount("/metrics", metrics_app)
+        verbose_proxy_logger.debug(
+            "Starting Prometheus Metrics on /metrics (no authentication)"
+        )
+
+    @staticmethod
+    def _mount_metrics_endpoint_with_auth():
+        from fastapi import APIRouter, Depends
+        from prometheus_client import make_asgi_app
+
+        from litellm._logging import verbose_proxy_logger
+        from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
+        from litellm.proxy.proxy_server import app
+
+        # Create metrics ASGI app
+        metrics_app = make_asgi_app()
+
+        # Create a router for authenticated metrics
+        metrics_router = APIRouter()
+
+        # Add metrics endpoint with authentication
+        @metrics_router.get("/metrics", dependencies=[Depends(user_api_key_auth)])
+        async def authenticated_metrics():
+            verbose_proxy_logger.debug("Serving authenticated metrics endpoint")
+            return metrics_app
+
+        # Mount the router to the app
+        app.include_router(metrics_router)
+        verbose_proxy_logger.debug(
+            "Starting Prometheus Metrics on /metrics with authentication"
+        )
+
+    @staticmethod
+    def _should_init_metrics_with_auth():
+        return litellm.require_auth_for_metrics_endpoint
+
+
 def prometheus_label_factory(
     supported_enum_labels: List[str],
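
With auth enabled, /metrics is served by a FastAPI route guarded by the proxy's user_api_key_auth dependency, so a scraper has to present a proxy key. A hedged, client-side sanity check (the base URL and key below are placeholders; the Bearer header is the proxy's usual virtual-key scheme):

    import requests

    PROXY_BASE = "http://localhost:4000"  # placeholder: wherever the proxy is running
    PROXY_KEY = "sk-1234"                 # placeholder: a key accepted by user_api_key_auth

    # Without credentials the authenticated route should be rejected.
    anon = requests.get(f"{PROXY_BASE}/metrics")
    print(anon.status_code)  # expect a 401-style rejection when require_auth_for_metrics_endpoint is true

    # With a valid key the scrape goes through and returns Prometheus exposition text.
    authed = requests.get(
        f"{PROXY_BASE}/metrics",
        headers={"Authorization": f"Bearer {PROXY_KEY}"},
    )
    print(authed.status_code)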
@@ -224,18 +224,9 @@ def initialize_callbacks_on_proxy(  # noqa: PLR0915
         litellm.callbacks = imported_list  # type: ignore
 
         if "prometheus" in value:
-            if premium_user is not True:
-                verbose_proxy_logger.warning(
-                    f"Prometheus metrics are only available for premium users. {CommonProxyErrors.not_premium_user.value}"
-                )
-            from litellm.proxy.proxy_server import app
-
-            verbose_proxy_logger.debug("Starting Prometheus Metrics on /metrics")
-            from prometheus_client import make_asgi_app
-
-            # Add prometheus asgi middleware to route /metrics requests
-            metrics_app = make_asgi_app()
-            app.mount("/metrics", metrics_app)
+            from litellm.integrations.prometheus import PrometheusLogger
+
+            PrometheusLogger._mount_metrics_endpoint(premium_user)
     else:
         litellm.callbacks = [
             get_instance_fn(
@@ -4,12 +4,12 @@ model_list:
       model: openai/fake
       api_key: fake-key
       api_base: https://exampleopenaiendpoint-production.up.railway.app/
-
-general_settings:
-  use_redis_transaction_buffer: true
+  - model_name: openai/gpt-4o
+    litellm_params:
+      model: openai/gpt-4o
+      api_key: fake-key
 
 litellm_settings:
-  cache: True
-  cache_params:
-    type: redis
-    supported_call_types: []
+  require_auth_for_metrics_endpoint: true
+  callbacks: ["prometheus"]
+  service_callback: ["prometheus_system"]
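
This example config wires the pieces together: the prometheus callback turns the logger on, and require_auth_for_metrics_endpoint: true makes _should_init_metrics_with_auth() pick the authenticated mount. A rough illustration of how a litellm_settings key ends up visible to PrometheusLogger (this is not the proxy's actual startup code, and the config path is hypothetical):

    import yaml
    import litellm

    with open("proxy_config.yaml") as f:  # hypothetical path to the config shown above
        config = yaml.safe_load(f)

    # Mimic the proxy copying litellm_settings keys onto the litellm module at startup,
    # which is where _should_init_metrics_with_auth() reads the flag from.
    for key, value in (config.get("litellm_settings") or {}).items():
        setattr(litellm, key, value)

    print(litellm.require_auth_for_metrics_endpoint)  # True with the config above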
@@ -1676,14 +1676,11 @@ class ProxyConfig:
                                 callback
                             )
                             if "prometheus" in callback:
-                                verbose_proxy_logger.debug(
-                                    "Starting Prometheus Metrics on /metrics"
-                                )
-                                from prometheus_client import make_asgi_app
-
-                                # Add prometheus asgi middleware to route /metrics requests
-                                metrics_app = make_asgi_app()
-                                app.mount("/metrics", metrics_app)
+                                from litellm.integrations.prometheus import (
+                                    PrometheusLogger,
+                                )
+
+                                PrometheusLogger._mount_metrics_endpoint(premium_user)
         print(  # noqa
             f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}"
         )  # noqa