"""
Functions to create audit logs for LiteLLM Proxy
"""

import json

import litellm
from litellm._logging import verbose_proxy_logger
from litellm.proxy._types import LiteLLM_AuditLogs


async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
    """
    Persist an audit-log row describing a management update.

    Best-effort: the function silently no-ops unless the deployment is a
    premium one AND `litellm.store_audit_logs` is enabled. A DB write
    failure is logged but never raised, so it can never block the LLM
    API call that triggered it.

    Args:
        request_data: The audit-log record to store. Its `updated_values`
            and `before_value` fields may arrive as dicts; they are
            JSON-serialized in place before the write.

    Raises:
        Exception: If audit logging is enabled but no Prisma DB client
            is connected (misconfiguration, surfaced loudly).
    """
    # Imported lazily to avoid a circular import with proxy_server.
    from litellm.proxy.proxy_server import premium_user, prisma_client

    # Audit logs are a premium, opt-in feature — bail out quietly otherwise.
    if premium_user is not True:
        return
    if litellm.store_audit_logs is not True:
        return
    if prisma_client is None:
        raise Exception("prisma_client is None, no DB connected")

    verbose_proxy_logger.debug("creating audit log for %s", request_data)

    # The audit table stores these payloads as JSON strings, not dicts.
    if isinstance(request_data.updated_values, dict):
        request_data.updated_values = json.dumps(request_data.updated_values)
    if isinstance(request_data.before_value, dict):
        request_data.before_value = json.dumps(request_data.before_value)

    _request_data = request_data.dict(exclude_none=True)

    try:
        await prisma_client.db.litellm_auditlog.create(
            data={
                **_request_data,  # type: ignore
            }
        )
    except Exception as e:
        # [Non-Blocking Exception. Do not allow blocking LLM API call]
        # Lazy %s formatting for consistency with the debug call above.
        verbose_proxy_logger.error("Failed Creating audit log %s", e)