Mirror of https://github.com/BerriAI/litellm.git
(fix) - proxy reliability, ensure duplicate callbacks are not added to proxy (#8067)
* refactor _add_callbacks_from_db_config
* fix check for _custom_logger_exists_in_litellm_callbacks
* move loc of test utils
* run ci/cd again
* test_add_custom_logger_callback_to_specific_event_with_duplicates_callbacks
* fix _custom_logger_class_exists_in_success_callbacks
* unit testing for test_add_callbacks_from_db_config
* test_custom_logger_exists_in_callbacks_individual_functions
* fix config.yml
* fix test test_stream_chunk_builder_openai_audio_output_usage - use direct dict comparison
Parent: ae7b042bc2
Commit: b812286534
5 changed files with 398 additions and 15 deletions
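The fix makes callback registration from the DB config idempotent: each time the proxy re-reads its config, a callback that is already registered is skipped rather than appended to litellm.success_callback or litellm.failure_callback again, and callbacks that correspond to custom logger classes are routed through _add_custom_logger_callback_to_specific_event for the matching event. The snippet below is a minimal standalone sketch of that idempotency rule, not litellm code; register_success_callback and success_callbacks are hypothetical stand-ins.

# Minimal sketch of idempotent callback registration (hypothetical names,
# not litellm's API); it mirrors the "no duplicates on reload" rule this commit enforces.
from typing import Callable, List, Union

CallbackT = Union[str, Callable]

success_callbacks: List[CallbackT] = []  # stand-in for litellm.success_callback


def register_success_callback(callback: CallbackT) -> None:
    """Append a callback only if it is not already registered."""
    if callback in success_callbacks:
        return  # repeated config reloads become no-ops
    success_callbacks.append(callback)


# Simulate the proxy applying the same DB config twice (e.g. on a config refresh).
for _ in range(2):
    register_success_callback("langfuse")
    register_success_callback("datadog")

assert success_callbacks == ["langfuse", "datadog"]  # no duplicates

In the actual change, the same guard is combined with a membership check against litellm._known_custom_logger_compatible_callbacks, as the diff below shows.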
@@ -280,7 +280,7 @@ from litellm.types.router import RouterGeneralSettings, updateDeployment
 from litellm.types.utils import CustomHuggingfaceTokenizer
 from litellm.types.utils import ModelInfo as ModelMapInfo
 from litellm.types.utils import StandardLoggingPayload
-from litellm.utils import get_end_user_id_for_cost_tracking
+from litellm.utils import _add_custom_logger_callback_to_specific_event

 try:
     from litellm._version import version
@@ -2401,13 +2401,12 @@ class ProxyConfig:
                 added_models += 1
         return added_models

-    async def _update_llm_router(  # noqa: PLR0915
+    async def _update_llm_router(
         self,
         new_models: list,
         proxy_logging_obj: ProxyLogging,
     ):
         global llm_router, llm_model_list, master_key, general_settings
         import base64

         try:
             if llm_router is None and master_key is not None:
@@ -2463,21 +2462,60 @@ class ProxyConfig:

         # check if user set any callbacks in Config Table
         config_data = await proxy_config.get_config()
+        self._add_callbacks_from_db_config(config_data)
+
+        # we need to set env variables too
+        self._add_environment_variables_from_db_config(config_data)
+
+        # router settings
+        await self._add_router_settings_from_db_config(
+            config_data=config_data, llm_router=llm_router, prisma_client=prisma_client
+        )
+
+        # general settings
+        self._add_general_settings_from_db_config(
+            config_data=config_data,
+            general_settings=general_settings,
+            proxy_logging_obj=proxy_logging_obj,
+        )
+
+    def _add_callbacks_from_db_config(self, config_data: dict) -> None:
+        """
+        Adds callbacks from DB config to litellm
+        """
         litellm_settings = config_data.get("litellm_settings", {}) or {}
         success_callbacks = litellm_settings.get("success_callback", None)
         failure_callbacks = litellm_settings.get("failure_callback", None)

         if success_callbacks is not None and isinstance(success_callbacks, list):
             for success_callback in success_callbacks:
-                if success_callback not in litellm.success_callback:
+                if (
+                    success_callback
+                    in litellm._known_custom_logger_compatible_callbacks
+                ):
+                    _add_custom_logger_callback_to_specific_event(
+                        success_callback, "success"
+                    )
+                elif success_callback not in litellm.success_callback:
                     litellm.success_callback.append(success_callback)

         # Add failure callbacks from DB to litellm
         if failure_callbacks is not None and isinstance(failure_callbacks, list):
             for failure_callback in failure_callbacks:
-                if failure_callback not in litellm.failure_callback:
+                if (
+                    failure_callback
+                    in litellm._known_custom_logger_compatible_callbacks
+                ):
+                    _add_custom_logger_callback_to_specific_event(
+                        failure_callback, "failure"
+                    )
+                elif failure_callback not in litellm.failure_callback:
                     litellm.failure_callback.append(failure_callback)
-        # we need to set env variables too
+
+    def _add_environment_variables_from_db_config(self, config_data: dict) -> None:
+        """
+        Adds environment variables from DB config to litellm
+        """
         environment_variables = config_data.get("environment_variables", {})
         for k, v in environment_variables.items():
             try:
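The refactored _add_callbacks_from_db_config above can be exercised directly, which is roughly what the tests named in the commit message do. A hedged sketch, not the PR's actual test: it assumes litellm is installed and litellm.proxy.proxy_server imports cleanly in your environment, and "my_logger" is a hypothetical callback name that is not one of the known custom-logger-compatible callbacks.

# Sketch: applying the same DB-backed config twice must not duplicate callbacks.
import litellm
from litellm.proxy.proxy_server import ProxyConfig

config_data = {
    "litellm_settings": {
        "success_callback": ["my_logger"],  # hypothetical callback name
        "failure_callback": ["my_logger"],
    }
}

proxy_config = ProxyConfig()
proxy_config._add_callbacks_from_db_config(config_data)
proxy_config._add_callbacks_from_db_config(config_data)  # second reload, same config

assert litellm.success_callback.count("my_logger") == 1
assert litellm.failure_callback.count("my_logger") == 1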
@@ -2489,7 +2527,15 @@ class ProxyConfig:
                     "Error setting env variable: %s - %s", k, str(e)
                 )

-        # router settings
+    async def _add_router_settings_from_db_config(
+        self,
+        config_data: dict,
+        llm_router: Optional[Router],
+        prisma_client: Optional[PrismaClient],
+    ) -> None:
+        """
+        Adds router settings from DB config to litellm proxy
+        """
         if llm_router is not None and prisma_client is not None:
             db_router_settings = await prisma_client.db.litellm_config.find_first(
                 where={"param_name": "router_settings"}
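_add_router_settings_from_db_config, introduced above, reads the router_settings row from the litellm_config table and passes its param_value into llm_router.update_settings, visible at the top of the next hunk. A hedged sketch of the kind of payload that row might carry; the specific keys and values are illustrative, not taken from this commit.

# Sketch: a DB-stored router_settings payload being applied to a Router.
# The keys are common litellm router settings; treat the exact set as illustrative.
from litellm import Router

router = Router(model_list=[])  # empty router, just to demonstrate update_settings

_router_settings = {
    "routing_strategy": "usage-based-routing",  # illustrative value
    "num_retries": 2,
}
router.update_settings(**_router_settings)  # same call the helper makes with the DB value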
@@ -2501,7 +2547,17 @@ class ProxyConfig:
                 _router_settings = db_router_settings.param_value
                 llm_router.update_settings(**_router_settings)

-        ## ALERTING ## [TODO] move this to the _update_general_settings() block
+    def _add_general_settings_from_db_config(
+        self, config_data: dict, general_settings: dict, proxy_logging_obj: ProxyLogging
+    ) -> None:
+        """
+        Adds general settings from DB config to litellm proxy
+
+        Args:
+            config_data: dict
+            general_settings: dict - global general_settings currently in use
+            proxy_logging_obj: ProxyLogging
+        """
         _general_settings = config_data.get("general_settings", {})
         if "alerting" in _general_settings:
             if (
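_add_general_settings_from_db_config checks for an alerting key under general_settings and, judging by its signature, wires it into proxy_logging_obj's alerting; the rest of its body is cut off by the hunk. A hedged sketch of the config shape it consumes; slack alerting and alerting_threshold are documented litellm proxy options, but the concrete values here are illustrative.

# Sketch: DB config shape consumed by _add_general_settings_from_db_config.
config_data = {
    "general_settings": {
        "alerting": ["slack"],       # alerting backends stored in the DB config
        "alerting_threshold": 300,   # seconds before a hanging-request alert fires (illustrative)
    }
}

_general_settings = config_data.get("general_settings", {})
if "alerting" in _general_settings:
    print("alerting backends from DB:", _general_settings["alerting"])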