(fix) proxy setting success callbacks

ishaan-jaff 2024-03-08 16:27:53 -08:00
parent 2145cffb6c
commit 8b5d255bee
2 changed files with 5 additions and 8 deletions

@@ -5,12 +5,9 @@ model_list:
       api_base: os.environ/AZURE_API_BASE
       api_key: os.environ/AZURE_API_KEY
       api_version: "2023-07-01-preview"
-  - model_name: azure-gpt-3.5
-    litellm_params:
-      model: gpt-3.5-turbo
-      api_key: os.environ/OPENAI_API_KEY
-    model_info:
-      access_groups: ["public"]
+litellm_settings:
+  set_verbose: True
+  success_callback: ["langfuse"]
 router_settings:
   set_verbose: True
   debug_level: "DEBUG"
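
The new litellm_settings block is what drives the callback setup below: at startup the proxy applies each key under litellm_settings to the litellm module. A minimal Python sketch of the equivalent programmatic configuration (litellm.set_verbose and litellm.success_callback are real module-level settings; the snippet itself is illustrative, not part of this commit):

    import litellm

    # roughly what the YAML litellm_settings block above results in
    litellm.set_verbose = True               # verbose debug logging
    litellm.success_callback = ["langfuse"]  # log successful LLM calls to Langfuse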

@@ -1675,9 +1675,9 @@ class ProxyConfig:
                     # these are litellm callbacks - "langfuse", "sentry", "wandb"
                     else:
                         litellm.success_callback.append(callback)
-                verbose_proxy_logger.debug(
+                print(  # noqa
                     f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}"
-                )
+                )  # noqa
             elif key == "failure_callback":
                 litellm.failure_callback = []
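
For context, the changed print sits at the end of the success-callback initialization loop in ProxyConfig, so the "Initialized Success Callbacks" message is now emitted unconditionally instead of only at debug log level. A hedged sketch of that surrounding logic with names simplified; only the append and the print/f-string lines come from the diff above, while the function wrapper and ANSI color values are assumptions for illustration:

    import litellm

    def initialize_success_callbacks(value: list) -> None:
        blue_color_code = "\033[94m"   # assumed ANSI blue; the real value is defined elsewhere in proxy_server.py
        reset_color_code = "\033[0m"   # assumed ANSI reset
        litellm.success_callback = []
        for callback in value:
            # these are litellm callbacks - "langfuse", "sentry", "wandb"
            litellm.success_callback.append(callback)
        # after this commit: plain print, so the message shows regardless of logger level
        print(  # noqa
            f"{blue_color_code} Initialized Success Callbacks - {litellm.success_callback} {reset_color_code}"
        )  # noqa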