feat(model_management_endpoints.py): emit audit logs on model delete

Krrish Dholakia 2025-03-13 18:48:38 -07:00
parent ac7c607a5f
commit e1476bbc51
6 changed files with 175 additions and 90 deletions
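
Usage sketch (not part of the commit): with the proxy running, STORE_MODEL_IN_DB enabled, and audit logging (litellm.store_audit_logs) turned on, a model delete now also schedules an audit-log write. The base URL, admin key, and model id below are placeholders.

import requests

PROXY_BASE_URL = "http://localhost:4000"  # placeholder proxy address
ADMIN_KEY = "sk-1234"  # placeholder admin virtual key

# POST /model/delete with the model's id; ModelInfoDelete expects {"id": ...}.
resp = requests.post(
    f"{PROXY_BASE_URL}/model/delete",
    headers={"Authorization": f"Bearer {ADMIN_KEY}"},
    json={"id": "model-123"},  # placeholder model id
    timeout=30,
)
print(resp.status_code, resp.json())
# On success the proxy deletes the row from LiteLLM_ProxyModelTable, removes the
# deployment from the router, and fires a background task that records a
# LiteLLM_AuditLogs entry with action="deleted".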

View file

@@ -133,7 +133,7 @@ class LitellmTableNames(str, enum.Enum):
     TEAM_TABLE_NAME = "LiteLLM_TeamTable"
     USER_TABLE_NAME = "LiteLLM_UserTable"
     KEY_TABLE_NAME = "LiteLLM_VerificationToken"
-    PROXY_MODEL_TABLE_NAME = "LiteLLM_ModelTable"
+    PROXY_MODEL_TABLE_NAME = "LiteLLM_ProxyModelTable"


 def hash_token(token: str):
@@ -1588,7 +1588,7 @@ class LiteLLM_UserTable(LiteLLMPydanticObjectBase):
     spend: float = 0.0
     model_max_budget: Optional[Dict] = {}
     model_spend: Optional[Dict] = {}
-    user_email: Optional[str]
+    user_email: Optional[str] = None
     models: list = []
     tpm_limit: Optional[int] = None
     rpm_limit: Optional[int] = None
@@ -1687,12 +1687,7 @@ class LiteLLM_AuditLogs(LiteLLMPydanticObjectBase):
     changed_by: Optional[Any] = None
     changed_by_api_key: Optional[str] = None
     action: AUDIT_ACTIONS
-    table_name: Literal[
-        LitellmTableNames.TEAM_TABLE_NAME,
-        LitellmTableNames.USER_TABLE_NAME,
-        LitellmTableNames.KEY_TABLE_NAME,
-        LitellmTableNames.PROXY_MODEL_TABLE_NAME,
-    ]
+    table_name: LitellmTableNames
     object_id: str
     before_value: Optional[Json] = None
     updated_values: Optional[Json] = None
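
The last hunk widens table_name from a Literal over four specific members to the LitellmTableNames enum itself. A minimal standalone sketch (toy model, not code from this commit) of what the str-enum typing accepts after the change:

import enum
from pydantic import BaseModel


class LitellmTableNames(str, enum.Enum):  # mirrors the enum in the hunk above
    TEAM_TABLE_NAME = "LiteLLM_TeamTable"
    USER_TABLE_NAME = "LiteLLM_UserTable"
    KEY_TABLE_NAME = "LiteLLM_VerificationToken"
    PROXY_MODEL_TABLE_NAME = "LiteLLM_ProxyModelTable"


class AuditLogSketch(BaseModel):  # stand-in for LiteLLM_AuditLogs' table_name field
    table_name: LitellmTableNames


# Both the enum member and its underlying string value validate to the same member.
assert (
    AuditLogSketch(table_name=LitellmTableNames.PROXY_MODEL_TABLE_NAME).table_name
    == AuditLogSketch(table_name="LiteLLM_ProxyModelTable").table_name
)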

View file

@@ -1,11 +1,18 @@
-from typing import Any, Union
+import uuid
+from datetime import datetime, timezone
+from typing import Any, Optional, Union
+
+import litellm

 from litellm.proxy._types import (
+    AUDIT_ACTIONS,
     GenerateKeyRequest,
+    LiteLLM_AuditLogs,
     LiteLLM_ManagementEndpoint_MetadataFields_Premium,
     LiteLLM_TeamTable,
+    LitellmTableNames,
     UserAPIKeyAuth,
 )
+from litellm.proxy.management_helpers.audit_logs import create_audit_log_for_update
 from litellm.proxy.utils import _premium_user_check

View file

@@ -826,7 +826,7 @@ async def user_update(
         return response  # type: ignore
         # update based on remaining passed in values
     except Exception as e:
-        verbose_proxy_logger.error(
+        verbose_proxy_logger.exception(
            "litellm.proxy.proxy_server.user_update(): Exception occured - {}".format(
                str(e)
            )

View file

@@ -10,19 +10,26 @@ model/{model_id}/update - PATCH endpoint for model update.

 #### MODEL MANAGEMENT ####

+import asyncio
 import json
 import uuid
+from datetime import datetime, timezone
 from typing import Optional, cast

 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from pydantic import BaseModel

+import litellm
 from litellm._logging import verbose_proxy_logger
 from litellm.constants import LITELLM_PROXY_ADMIN_NAME
 from litellm.proxy._types import (
+    AUDIT_ACTIONS,
     CommonProxyErrors,
+    LiteLLM_AuditLogs,
     LiteLLM_ProxyModelTable,
+    LitellmTableNames,
     LitellmUserRoles,
+    ModelInfoDelete,
     PrismaCompatibleUpdateDBModel,
     ProxyErrorTypes,
     ProxyException,
@@ -36,6 +43,7 @@ from litellm.proxy.management_endpoints.team_endpoints import (
     team_model_add,
     update_team,
 )
+from litellm.proxy.management_helpers.audit_logs import create_object_audit_log
 from litellm.proxy.utils import PrismaClient
 from litellm.types.router import (
     Deployment,
@@ -329,3 +337,101 @@ def check_if_team_id_matches_key(
     if user_api_key_dict.team_id != team_id:
         can_make_call = False
     return can_make_call
+
+
+#### [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
+@router.post(
+    "/model/delete",
+    description="Allows deleting models in the model list in the config.yaml",
+    tags=["model management"],
+    dependencies=[Depends(user_api_key_auth)],
+)
+async def delete_model(
+    model_info: ModelInfoDelete,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    global llm_router, llm_model_list, general_settings, user_config_file_path, proxy_config
+    try:
+        """
+        [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
+
+        - Check if id in db
+        - Delete
+        """
+        from litellm.proxy.proxy_server import (
+            llm_router,
+            prisma_client,
+            store_model_in_db,
+        )
+
+        if prisma_client is None:
+            raise HTTPException(
+                status_code=500,
+                detail={
+                    "error": "No DB Connected. Here's how to do it - https://docs.litellm.ai/docs/proxy/virtual_keys"
+                },
+            )
+
+        # update DB
+        if store_model_in_db is True:
+            """
+            - store model_list in db
+            - store keys separately
+            """
+            # encrypt litellm params #
+            result = await prisma_client.db.litellm_proxymodeltable.delete(
+                where={"model_id": model_info.id}
+            )
+
+            if result is None:
+                raise HTTPException(
+                    status_code=400,
+                    detail={"error": f"Model with id={model_info.id} not found in db"},
+                )
+
+            ## DELETE FROM ROUTER ##
+            if llm_router is not None:
+                llm_router.delete_deployment(id=model_info.id)
+
+            ## CREATE AUDIT LOG ##
+            asyncio.create_task(
+                create_object_audit_log(
+                    object_id=model_info.id,
+                    action="deleted",
+                    user_api_key_dict=user_api_key_dict,
+                    table_name=LitellmTableNames.PROXY_MODEL_TABLE_NAME,
+                    before_value=result.model_dump_json(exclude_none=True),
+                    after_value=None,
+                    litellm_changed_by=user_api_key_dict.user_id,
+                    litellm_proxy_admin_name=LITELLM_PROXY_ADMIN_NAME,
+                )
+            )
+
+            return {"message": f"Model: {result.model_id} deleted successfully"}
+        else:
+            raise HTTPException(
+                status_code=500,
+                detail={
+                    "error": "Set `'STORE_MODEL_IN_DB='True'` in your env to enable this feature."
+                },
+            )
+    except Exception as e:
+        verbose_proxy_logger.exception(
+            f"Failed to delete model. Due to error - {str(e)}"
+        )
+        if isinstance(e, HTTPException):
+            raise ProxyException(
+                message=getattr(e, "detail", f"Authentication Error({str(e)})"),
+                type=ProxyErrorTypes.auth_error,
+                param=getattr(e, "param", "None"),
+                code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST),
+            )
+        elif isinstance(e, ProxyException):
+            raise e
+        raise ProxyException(
+            message="Authentication Error, " + str(e),
+            type=ProxyErrorTypes.auth_error,
+            param=getattr(e, "param", "None"),
+            code=status.HTTP_400_BAD_REQUEST,
+        )
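
For illustration only (not part of the diff): the background task scheduled above ends up persisting a record shaped like the LiteLLM_AuditLogs object below, with action="deleted" against LiteLLM_ProxyModelTable, the pre-delete model row serialized into before_value, and no after value. Every literal here is a placeholder standing in for the real request's ids and snapshot.

import uuid
from datetime import datetime, timezone

from litellm.proxy._types import LiteLLM_AuditLogs, LitellmTableNames

audit_entry = LiteLLM_AuditLogs(
    id=str(uuid.uuid4()),
    updated_at=datetime.now(timezone.utc),
    changed_by="user-abc",                # placeholder: requesting user's id
    changed_by_api_key="hashed-api-key",  # placeholder: caller's hashed key
    table_name=LitellmTableNames.PROXY_MODEL_TABLE_NAME,
    object_id="model-123",                # placeholder: deleted model's id
    action="deleted",
    before_value='{"model_id": "model-123", "model_name": "gpt-4o"}',  # placeholder snapshot
    updated_values=None,
)
print(audit_entry.model_dump_json(exclude_none=True))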

View file

@@ -3,13 +3,68 @@ Functions to create audit logs for LiteLLM Proxy
 """

 import json
+import uuid
+from datetime import datetime, timezone

 import litellm
 from litellm._logging import verbose_proxy_logger
-from litellm.proxy._types import LiteLLM_AuditLogs
+from litellm.proxy._types import (
+    AUDIT_ACTIONS,
+    LiteLLM_AuditLogs,
+    LitellmTableNames,
+    Optional,
+    UserAPIKeyAuth,
+)
+
+
+async def create_object_audit_log(
+    object_id: str,
+    action: AUDIT_ACTIONS,
+    litellm_changed_by: Optional[str],
+    user_api_key_dict: UserAPIKeyAuth,
+    litellm_proxy_admin_name: Optional[str],
+    table_name: LitellmTableNames,
+    before_value: Optional[str] = None,
+    after_value: Optional[str] = None,
+):
"""
Create an audit log for an internal user.
Parameters:
- user_id: str - The id of the user to create the audit log for.
- action: AUDIT_ACTIONS - The action to create the audit log for.
- user_row: LiteLLM_UserTable - The user row to create the audit log for.
- litellm_changed_by: Optional[str] - The user id of the user who is changing the user.
- user_api_key_dict: UserAPIKeyAuth - The user api key dictionary.
- litellm_proxy_admin_name: Optional[str] - The name of the proxy admin.
"""
+    if not litellm.store_audit_logs:
+        return
+
+    await create_audit_log_for_update(
+        request_data=LiteLLM_AuditLogs(
+            id=str(uuid.uuid4()),
+            updated_at=datetime.now(timezone.utc),
+            changed_by=litellm_changed_by
+            or user_api_key_dict.user_id
+            or litellm_proxy_admin_name,
+            changed_by_api_key=user_api_key_dict.api_key,
+            table_name=table_name,
+            object_id=object_id,
+            action=action,
+            updated_values=after_value,
+            before_value=before_value,
+        )
+    )
+
+
 async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
+    """
+    Create an audit log for an object.
+    """
+    if not litellm.store_audit_logs:
+        return
+
     from litellm.proxy.proxy_server import premium_user, prisma_client

     if premium_user is not True:
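
A hedged sketch (assumed usage, not from the commit) of the gating behaviour: create_object_audit_log is a no-op unless litellm.store_audit_logs is enabled, and create_audit_log_for_update additionally requires a premium proxy, so the delete path never blocks on audit logging. The ids and names below are placeholders, and UserAPIKeyAuth() is constructed empty purely for illustration.

import asyncio

import litellm
from litellm.proxy._types import LitellmTableNames, UserAPIKeyAuth
from litellm.proxy.management_helpers.audit_logs import create_object_audit_log


async def main() -> None:
    litellm.store_audit_logs = False  # with audit logging off, the helper returns immediately
    await create_object_audit_log(
        object_id="model-123",  # placeholder id of the deleted object
        action="deleted",
        litellm_changed_by=None,
        user_api_key_dict=UserAPIKeyAuth(),  # empty auth object, illustration only
        litellm_proxy_admin_name="default_user_id",  # placeholder admin name
        table_name=LitellmTableNames.PROXY_MODEL_TABLE_NAME,
        before_value='{"model_name": "gpt-4o"}',  # placeholder pre-delete snapshot
    )  # returns without touching the DB because store_audit_logs is False


asyncio.run(main())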

View file

@@ -3182,7 +3182,7 @@ class ProxyStartupEvent:

         # add proxy budget to db in the user table
         asyncio.create_task(
-            generate_key_helper_fn(
+            generate_key_helper_fn(  # type: ignore
                 request_type="user",
                 user_id=litellm_proxy_budget_name,
                 duration=None,
@@ -6592,84 +6592,6 @@ async def model_group_info(
     return {"data": model_groups}


-#### [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
-@router.post(
-    "/model/delete",
-    description="Allows deleting models in the model list in the config.yaml",
-    tags=["model management"],
-    dependencies=[Depends(user_api_key_auth)],
-)
-async def delete_model(model_info: ModelInfoDelete):
-    global llm_router, llm_model_list, general_settings, user_config_file_path, proxy_config
-    try:
-        """
-        [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
-
-        - Check if id in db
-        - Delete
-        """
-        global prisma_client
-
-        if prisma_client is None:
-            raise HTTPException(
-                status_code=500,
-                detail={
-                    "error": "No DB Connected. Here's how to do it - https://docs.litellm.ai/docs/proxy/virtual_keys"
-                },
-            )
-
-        # update DB
-        if store_model_in_db is True:
-            """
-            - store model_list in db
-            - store keys separately
-            """
-            # encrypt litellm params #
-            result = await prisma_client.db.litellm_proxymodeltable.delete(
-                where={"model_id": model_info.id}
-            )
-
-            if result is None:
-                raise HTTPException(
-                    status_code=400,
-                    detail={"error": f"Model with id={model_info.id} not found in db"},
-                )
-
-            ## DELETE FROM ROUTER ##
-            if llm_router is not None:
-                llm_router.delete_deployment(id=model_info.id)
-
-            return {"message": f"Model: {result.model_id} deleted successfully"}
-        else:
-            raise HTTPException(
-                status_code=500,
-                detail={
-                    "error": "Set `'STORE_MODEL_IN_DB='True'` in your env to enable this feature."
-                },
-            )
-    except Exception as e:
-        verbose_proxy_logger.exception(
-            f"Failed to delete model. Due to error - {str(e)}"
-        )
-        if isinstance(e, HTTPException):
-            raise ProxyException(
-                message=getattr(e, "detail", f"Authentication Error({str(e)})"),
-                type=ProxyErrorTypes.auth_error,
-                param=getattr(e, "param", "None"),
-                code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST),
-            )
-        elif isinstance(e, ProxyException):
-            raise e
-        raise ProxyException(
-            message="Authentication Error, " + str(e),
-            type=ProxyErrorTypes.auth_error,
-            param=getattr(e, "param", "None"),
-            code=status.HTTP_400_BAD_REQUEST,
-        )
-
-
 @router.get(
     "/model/settings",
     description="Returns provider name, description, and required parameters for each provider",