Merge pull request #3910 from BerriAI/litellm_fix_end_user_max_budget

fix(proxy_server.py): fix end user object check when master key used
Krish Dholakia 2024-05-29 18:04:56 -07:00 committed by GitHub
commit 7ef23e6fe9
5 changed files with 92 additions and 3 deletions

View file

@@ -314,6 +314,7 @@ class BudgetExceededError(Exception):
        self.current_cost = current_cost
        self.max_budget = max_budget
        message = f"Budget has been exceeded! Current cost: {current_cost}, Max budget: {max_budget}"
        self.message = message
        super().__init__(message)
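A minimal sketch (not part of this commit) of why storing self.message matters: handlers downstream, like the auth path changed later in this PR, can surface the formatted budget details without rebuilding the string. The cost and budget numbers here are made up.

import litellm

try:
    raise litellm.BudgetExceededError(current_cost=12.5, max_budget=10.0)
except litellm.BudgetExceededError as e:
    # e.message now carries the text formatted in __init__
    print(e.message)  # Budget has been exceeded! Current cost: 12.5, Max budget: 10.0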

View file

@@ -1443,7 +1443,9 @@ Model Info:
        if response.status_code == 200:
            pass
        else:
            print("Error sending slack alert. Error=", response.text)  # noqa
            verbose_proxy_logger.debug(
                "Error sending slack alert. Error=", response.text
            )

    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        """Log deployment latency"""

View file

@@ -193,13 +193,27 @@ async def get_end_user_object(
    if end_user_id is None:
        return None
    _key = "end_user_id:{}".format(end_user_id)

    def check_in_budget(end_user_obj: LiteLLM_EndUserTable):
        if end_user_obj.litellm_budget_table is None:
            return
        end_user_budget = end_user_obj.litellm_budget_table.max_budget
        if end_user_budget is not None and end_user_obj.spend > end_user_budget:
            raise litellm.BudgetExceededError(
                current_cost=end_user_obj.spend, max_budget=end_user_budget
            )

    # check if in cache
    cached_user_obj = await user_api_key_cache.async_get_cache(key=_key)
    if cached_user_obj is not None:
        if isinstance(cached_user_obj, dict):
            return LiteLLM_EndUserTable(**cached_user_obj)
            return_obj = LiteLLM_EndUserTable(**cached_user_obj)
            check_in_budget(end_user_obj=return_obj)
            return return_obj
        elif isinstance(cached_user_obj, LiteLLM_EndUserTable):
            return cached_user_obj
            return_obj = cached_user_obj
            check_in_budget(end_user_obj=return_obj)
            return return_obj
    # else, check db
    try:
        response = await prisma_client.db.litellm_endusertable.find_unique(
@@ -217,8 +231,12 @@
        _response = LiteLLM_EndUserTable(**response.dict())
        check_in_budget(end_user_obj=_response)
        return _response
    except Exception as e:  # if end-user not in db
        if isinstance(e, litellm.BudgetExceededError):
            raise e
        return None
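With check_in_budget applied on both the cache-hit and DB paths, callers of get_end_user_object now see an over-budget customer as an exception instead of a silently returned object. A rough usage sketch, assuming it runs inside the proxy where prisma_client and user_api_key_cache already exist (the wrapper function name is made up):

import litellm
from litellm.proxy.auth.auth_checks import get_end_user_object

async def lookup_end_user(end_user_id, prisma_client, user_api_key_cache):
    try:
        return await get_end_user_object(
            end_user_id=end_user_id,
            prisma_client=prisma_client,
            user_api_key_cache=user_api_key_cache,
        )
    except litellm.BudgetExceededError as e:
        # Now raised on the cached path too, so over-budget customers are
        # rejected before any model call is made.
        print(e.message)
        raise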

View file

@@ -722,6 +722,8 @@ async def user_api_key_auth(
                            budget_info.max_budget
                        )
            except Exception as e:
                if isinstance(e, litellm.BudgetExceededError):
                    raise e
                verbose_proxy_logger.debug(
                    "Unable to find user in db. Error - {}".format(str(e))
                )
@@ -1410,6 +1412,10 @@
            raise Exception()
    except Exception as e:
        traceback.print_exc()
        if isinstance(e, litellm.BudgetExceededError):
            raise ProxyException(
                message=e.message, type="auth_error", param=None, code=400
            )
        if isinstance(e, HTTPException):
            raise ProxyException(
                message=getattr(e, "detail", f"Authentication Error({str(e)})"),
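The net effect of mapping BudgetExceededError onto a ProxyException with code=400: an over-budget end user gets a clear 400 from the proxy instead of a generic auth failure, even when the request is authenticated with the master key. A hypothetical client-side check (the proxy URL, master key, and customer id are placeholders; the exact response body is not pinned down by this PR):

import requests

resp = requests.post(
    "http://localhost:4000/chat/completions",
    headers={"Authorization": "Bearer sk-1234"},  # master key in this scenario
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "hi"}],
        "user": "my-test-customer",  # end-user id the proxy tracks spend against
    },
)

# An over-budget end user should now surface as HTTP 400 with the budget message.
print(resp.status_code, resp.text)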

View file

@@ -0,0 +1,62 @@
# What is this?
## Tests if 'get_end_user_object' works as expected

import sys, os, asyncio, time, random, uuid
import traceback
from dotenv import load_dotenv

load_dotenv()
import os

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
import pytest, litellm
from litellm.proxy.auth.auth_checks import get_end_user_object
from litellm.caching import DualCache
from litellm.proxy._types import LiteLLM_EndUserTable, LiteLLM_BudgetTable
from litellm.proxy.utils import PrismaClient


@pytest.mark.parametrize("customer_spend, customer_budget", [(0, 10), (10, 0)])
@pytest.mark.asyncio
async def test_get_end_user_object(customer_spend, customer_budget):
    """
    Scenario 1: normal
    Scenario 2: user over budget
    """
    end_user_id = "my-test-customer"
    _budget = LiteLLM_BudgetTable(max_budget=customer_budget)
    end_user_obj = LiteLLM_EndUserTable(
        user_id=end_user_id,
        spend=customer_spend,
        litellm_budget_table=_budget,
        blocked=False,
    )
    _cache = DualCache()
    _key = "end_user_id:{}".format(end_user_id)
    _cache.set_cache(key=_key, value=end_user_obj)

    try:
        await get_end_user_object(
            end_user_id=end_user_id,
            prisma_client="RANDOM VALUE",  # type: ignore
            user_api_key_cache=_cache,
        )
        if customer_spend > customer_budget:
            pytest.fail(
                "Expected call to fail. Customer Spend={}, Customer Budget={}".format(
                    customer_spend, customer_budget
                )
            )
    except Exception as e:
        if (
            isinstance(e, litellm.BudgetExceededError)
            and customer_spend > customer_budget
        ):
            pass
        else:
            pytest.fail(
                "Expected call to work. Customer Spend={}, Customer Budget={}, Error={}".format(
                    customer_spend, customer_budget, str(e)
                )
            )
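A note on the test setup: prisma_client is passed as the literal string "RANDOM VALUE" because the end-user object is pre-seeded into the DualCache, so get_end_user_object resolves (or raises BudgetExceededError) on the cache path and never touches the database client.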