mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
(feat) proxy: use async_callback function
This commit is contained in:
parent
b5b366626f
commit
6b8d1a20f5
3 changed files with 52 additions and 0 deletions
|
@ -498,6 +498,18 @@ def load_router_config(router: Optional[litellm.Router], config_file_path: str):
|
|||
)
|
||||
elif key == "callbacks":
|
||||
litellm.callbacks = [get_instance_fn(value=value)]
|
||||
elif key == "success_callback":
|
||||
litellm.success_callback = []
|
||||
|
||||
# initialize success callbacks
|
||||
for callback in value:
|
||||
# user passed custom_callbacks.async_on_succes_logger. They need us to import a function
|
||||
if "." in callback:
|
||||
litellm.success_callback.append(get_instance_fn(value=callback))
|
||||
# these are litellm callbacks - "langfuse", "sentry", "wandb"
|
||||
else:
|
||||
litellm.success_callback.append(callback)
|
||||
print_verbose(f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}")
|
||||
else:
|
||||
setattr(litellm, key, value)
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue