Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
(docs) proxy - OTEL + traceloop
This commit is contained in:
parent 13731f4bd8
commit 0dc7d538b5
2 changed files with 46 additions and 39 deletions
@@ -523,6 +523,10 @@ def load_router_config(router: Optional[litellm.Router], config_file_path: str):
                     # these are litellm callbacks - "langfuse", "sentry", "wandb"
                     else:
                         litellm.success_callback.append(callback)
+                        if callback == "traceloop":
+                            from traceloop.sdk import Traceloop
+                            print_verbose(f"{blue_color_code} Initializing Traceloop SDK - \nRunning:`Traceloop.init(app_name='Litellm-Server', disable_batch=True)`")
+                            Traceloop.init(app_name="Litellm-Server", disable_batch=True)
                 print_verbose(f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}")
             elif key == "failure_callback":
                 litellm.failure_callback = []
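For context, the added branch amounts to registering "traceloop" as a LiteLLM success callback and initializing the Traceloop SDK, which exports traces over OpenTelemetry. Below is a minimal standalone sketch of that same setup, assuming the litellm and traceloop-sdk packages are installed; the model and message used in the completion call are illustrative and not part of this commit.

import litellm
from traceloop.sdk import Traceloop

# Same initialization the proxy performs when "traceloop" appears in its
# success callbacks: start the Traceloop SDK (OpenTelemetry exporter) with
# batching disabled so spans are sent immediately.
Traceloop.init(app_name="Litellm-Server", disable_batch=True)

# Register the callback so successful LiteLLM calls are reported to Traceloop.
litellm.success_callback = ["traceloop"]

# Illustrative request; its success event is now traced via Traceloop/OTEL.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)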