mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
correctly clear cache when updating a user
This commit is contained in:
parent 8cce7d2df1
commit f2b0929284

1 changed file with 73 additions and 17 deletions
@@ -8,10 +8,18 @@ from typing import Optional
 from fastapi import Request
 
 from litellm._logging import verbose_logger
-from litellm.proxy._types import (
+from litellm.proxy._types import (  # key request types; user request types; team request types; customer request types
+    DeleteCustomerRequest,
+    DeleteTeamRequest,
+    DeleteUserRequest,
+    KeyRequest,
     LiteLLM_TeamTable,
     ManagementEndpointLoggingPayload,
     Member,
+    UpdateCustomerRequest,
+    UpdateKeyRequest,
+    UpdateTeamRequest,
+    UpdateUserRequest,
     UserAPIKeyAuth,
 )
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
@@ -77,6 +85,66 @@ async def add_new_member(
         )
 
 
+def _delete_user_id_from_cache(kwargs):
+    from litellm.proxy.proxy_server import user_api_key_cache
+
+    if kwargs.get("data") is not None:
+        update_user_request = kwargs.get("data")
+        if isinstance(update_user_request, UpdateUserRequest):
+            user_api_key_cache.delete_cache(key=update_user_request.user_id)
+
+        # delete user request
+        if isinstance(update_user_request, DeleteUserRequest):
+            for user_id in update_user_request.user_ids:
+                user_api_key_cache.delete_cache(key=user_id)
+    pass
+
+
+def _delete_api_key_from_cache(kwargs):
+    from litellm.proxy.proxy_server import user_api_key_cache
+
+    if kwargs.get("data") is not None:
+        update_request = kwargs.get("data")
+        if isinstance(update_request, UpdateKeyRequest):
+            user_api_key_cache.delete_cache(key=update_request.key)
+
+        # delete key request
+        if isinstance(update_request, KeyRequest):
+            for key in update_request.keys:
+                user_api_key_cache.delete_cache(key=key)
+    pass
+
+
+def _delete_team_id_from_cache(kwargs):
+    from litellm.proxy.proxy_server import user_api_key_cache
+
+    if kwargs.get("data") is not None:
+        update_request = kwargs.get("data")
+        if isinstance(update_request, UpdateTeamRequest):
+            user_api_key_cache.delete_cache(key=update_request.team_id)
+
+        # delete team request
+        if isinstance(update_request, DeleteTeamRequest):
+            for team_id in update_request.team_ids:
+                user_api_key_cache.delete_cache(key=team_id)
+    pass
+
+
+def _delete_customer_id_from_cache(kwargs):
+    from litellm.proxy.proxy_server import user_api_key_cache
+
+    if kwargs.get("data") is not None:
+        update_request = kwargs.get("data")
+        if isinstance(update_request, UpdateCustomerRequest):
+            user_api_key_cache.delete_cache(key=update_request.user_id)
+
+        # delete customer request
+        if isinstance(update_request, DeleteCustomerRequest):
+            for user_id in update_request.user_ids:
+                user_api_key_cache.delete_cache(key=user_id)
+    pass
+
+
 def management_endpoint_wrapper(func):
     """
     This wrapper does the following:
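The four helpers added above share one pattern: read the parsed request body out of kwargs["data"], and if it is one of the known update/delete request models, evict only the affected entries from user_api_key_cache via delete_cache(key=...). Below is a minimal standalone sketch of that dispatch, using a dict-backed stub cache and simplified request dataclasses in place of litellm's real user_api_key_cache and Pydantic models (the stub names and sample IDs are illustrative, not part of the commit):

from dataclasses import dataclass, field
from typing import List, Optional


class StubCache:
    """Stand-in for litellm's user_api_key_cache; only delete_cache() matters here."""

    def __init__(self):
        self._store = {}

    def set_cache(self, key, value):
        self._store[key] = value

    def delete_cache(self, key):
        # Evict a single entry instead of flushing the whole cache.
        self._store.pop(key, None)


@dataclass
class UpdateUserRequest:
    user_id: Optional[str] = None


@dataclass
class DeleteUserRequest:
    user_ids: List[str] = field(default_factory=list)


def _delete_user_id_from_cache(kwargs, cache):
    # Same dispatch as the diff: an update evicts one key, a delete evicts each listed key.
    data = kwargs.get("data")
    if data is None:
        return
    if isinstance(data, UpdateUserRequest):
        cache.delete_cache(key=data.user_id)
    if isinstance(data, DeleteUserRequest):
        for user_id in data.user_ids:
            cache.delete_cache(key=user_id)


cache = StubCache()
cache.set_cache("user-1", {"spend": 0.0})
cache.set_cache("user-2", {"spend": 5.0})

_delete_user_id_from_cache({"data": UpdateUserRequest(user_id="user-1")}, cache)
_delete_user_id_from_cache({"data": DeleteUserRequest(user_ids=["user-2"])}, cache)
assert cache._store == {}  # only the targeted entries were evicted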
@@ -123,22 +191,10 @@ def management_endpoint_wrapper(func):
                     logging_payload=logging_payload,
                     parent_otel_span=parent_otel_span,
                 )
-            if _http_request:
-                _route = _http_request.url.path
-                # Flush user_api_key cache if this was an update/delete call to /key, /team, or /user
-                if _route in [
-                    "/key/update",
-                    "/key/delete",
-                    "/team/update",
-                    "/team/delete",
-                    "/user/update",
-                    "/user/delete",
-                    "/customer/update",
-                    "/customer/delete",
-                ]:
-                    from litellm.proxy.proxy_server import user_api_key_cache
-
-                    user_api_key_cache.flush_cache()
+            _delete_api_key_from_cache(kwargs=kwargs)
+            _delete_user_id_from_cache(kwargs=kwargs)
+            _delete_team_id_from_cache(kwargs=kwargs)
+            _delete_customer_id_from_cache(kwargs=kwargs)
         except Exception as e:
             # Non-Blocking Exception
             verbose_logger.debug("Error in management endpoint wrapper: %s", str(e))
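The wrapper hunk above replaces a route-based full flush (user_api_key_cache.flush_cache() whenever the path was one of the /key, /team, /user, or /customer update/delete routes) with targeted eviction after every wrapped management call. A rough sketch of that decorator shape follows, with litellm's logging and OTEL plumbing stripped out; the no-op helper stubs and the example endpoint are stand-ins, and only the helper names come from the diff:

import asyncio
import functools
import logging

logger = logging.getLogger(__name__)


# No-op stand-ins for the four cache helpers added in this commit.
def _delete_api_key_from_cache(kwargs): ...
def _delete_user_id_from_cache(kwargs): ...
def _delete_team_id_from_cache(kwargs): ...
def _delete_customer_id_from_cache(kwargs): ...


def management_endpoint_wrapper(func):
    """Simplified shape: run the endpoint, then evict only the affected cache entries."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        result = await func(*args, **kwargs)
        try:
            # Targeted eviction replaces the old route check + flush_cache().
            _delete_api_key_from_cache(kwargs=kwargs)
            _delete_user_id_from_cache(kwargs=kwargs)
            _delete_team_id_from_cache(kwargs=kwargs)
            _delete_customer_id_from_cache(kwargs=kwargs)
        except Exception as e:
            # Non-blocking: cache-cleanup failures must not fail the management call.
            logger.debug("Error in management endpoint wrapper: %s", str(e))
        return result

    return wrapper


@management_endpoint_wrapper
async def update_user(data=None):
    return {"ok": True}


asyncio.run(update_user(data={"user_id": "user-1"}))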