Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
Merge pull request #2591 from BerriAI/litellm_metrics_endpoint
[Feat] /metrics endpoint for Prometheus, Grafana
Commit c94bc94ad5: 5 changed files with 134 additions and 2 deletions
@@ -1872,6 +1872,15 @@ class ProxyConfig:
                         # these are litellm callbacks - "langfuse", "sentry", "wandb"
                         else:
                             litellm.success_callback.append(callback)
+                            if "prometheus" in callback:
+                                verbose_proxy_logger.debug(
+                                    "Starting Prometheus Metrics on /metrics"
+                                )
+                                from prometheus_client import make_asgi_app
+
+                                # Add prometheus asgi middleware to route /metrics requests
+                                metrics_app = make_asgi_app()
+                                app.mount("/metrics", metrics_app)
                     print(  # noqa
                         f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}"
                     )  # noqa
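For readers who have not used prometheus_client's ASGI integration, the standalone sketch below reproduces the mounting pattern from the diff: make_asgi_app() builds a small ASGI app that serves the Prometheus exposition format, and FastAPI's mount() attaches it at /metrics. The app, route, and counter name here are illustrative only, not part of litellm.

# Standalone sketch (not litellm code): expose Prometheus metrics on /metrics
# using the same make_asgi_app() + app.mount() pattern as the diff above.
from fastapi import FastAPI
from prometheus_client import Counter, make_asgi_app

app = FastAPI()

# Illustrative counter; litellm defines its own metrics inside its prometheus callback.
PING_COUNT = Counter("demo_ping_requests_total", "Total /ping requests served")

@app.get("/ping")
async def ping():
    PING_COUNT.inc()
    return {"status": "ok"}

# Add prometheus asgi middleware to route /metrics requests.
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)

Running this with uvicorn and requesting /metrics returns the plain-text exposition format that Prometheus scrapes and Grafana visualizes.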
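Once a proxy instance starts with the prometheus callback enabled, the new endpoint can be scraped like any other Prometheus target. A quick hedged check, assuming a proxy listening on http://localhost:4000 (the host and port are assumptions about a local deployment, not something fixed by this PR):

# Fetch the Prometheus exposition text from the /metrics endpoint and print the
# first few lines ("# HELP", "# TYPE", and sample lines).
# The URL is an assumption about a local deployment; adjust as needed.
import urllib.request

PROXY_METRICS_URL = "http://localhost:4000/metrics"

with urllib.request.urlopen(PROXY_METRICS_URL) as resp:
    body = resp.read().decode("utf-8")

for line in body.splitlines()[:20]:
    print(line)

In a real setup the same URL would go into a Prometheus scrape_config target, and Grafana would then query Prometheus rather than the proxy directly.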