mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
refactor(proxy_server.py): show TTLs on a top-level enum
Addresses - https://github.com/BerriAI/litellm/issues/2649#issuecomment-2097203372
This commit is contained in:
parent
d94065ca43
commit
26c0ed0f2d
1 changed files with 14 additions and 4 deletions
|
@ -221,6 +221,12 @@ class ProxyException(Exception):
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class UserAPIKeyCacheTTLEnum(enum.Enum):
|
||||||
|
key_information_cache = 600
|
||||||
|
user_information_cache = 600
|
||||||
|
global_proxy_spend = 60
|
||||||
|
|
||||||
|
|
||||||
@app.exception_handler(ProxyException)
|
@app.exception_handler(ProxyException)
|
||||||
async def openai_exception_handler(request: Request, exc: ProxyException):
|
async def openai_exception_handler(request: Request, exc: ProxyException):
|
||||||
# NOTE: DO NOT MODIFY THIS, its crucial to map to Openai exceptions
|
# NOTE: DO NOT MODIFY THIS, its crucial to map to Openai exceptions
|
||||||
|
@ -479,7 +485,7 @@ async def user_api_key_auth(
|
||||||
await user_api_key_cache.async_set_cache(
|
await user_api_key_cache.async_set_cache(
|
||||||
key="{}:spend".format(litellm_proxy_admin_name),
|
key="{}:spend".format(litellm_proxy_admin_name),
|
||||||
value=global_proxy_spend,
|
value=global_proxy_spend,
|
||||||
ttl=60,
|
ttl=UserAPIKeyCacheTTLEnum.global_proxy_spend.value,
|
||||||
)
|
)
|
||||||
if global_proxy_spend is not None:
|
if global_proxy_spend is not None:
|
||||||
user_info = {
|
user_info = {
|
||||||
|
@ -740,7 +746,9 @@ async def user_api_key_auth(
|
||||||
)
|
)
|
||||||
for _id in user_id_information:
|
for _id in user_id_information:
|
||||||
await user_api_key_cache.async_set_cache(
|
await user_api_key_cache.async_set_cache(
|
||||||
key=_id["user_id"], value=_id, ttl=600
|
key=_id["user_id"],
|
||||||
|
value=_id,
|
||||||
|
ttl=UserAPIKeyCacheTTLEnum.user_information_cache.value,
|
||||||
)
|
)
|
||||||
if custom_db_client is not None:
|
if custom_db_client is not None:
|
||||||
user_id_information = await custom_db_client.get_data(
|
user_id_information = await custom_db_client.get_data(
|
||||||
|
@ -961,7 +969,7 @@ async def user_api_key_auth(
|
||||||
await user_api_key_cache.async_set_cache(
|
await user_api_key_cache.async_set_cache(
|
||||||
key="{}:spend".format(litellm_proxy_admin_name),
|
key="{}:spend".format(litellm_proxy_admin_name),
|
||||||
value=global_proxy_spend,
|
value=global_proxy_spend,
|
||||||
ttl=60,
|
ttl=UserAPIKeyCacheTTLEnum.global_proxy_spend.value,
|
||||||
)
|
)
|
||||||
|
|
||||||
if global_proxy_spend is not None:
|
if global_proxy_spend is not None:
|
||||||
|
@ -993,7 +1001,9 @@ async def user_api_key_auth(
|
||||||
|
|
||||||
# Add hashed token to cache
|
# Add hashed token to cache
|
||||||
await user_api_key_cache.async_set_cache(
|
await user_api_key_cache.async_set_cache(
|
||||||
key=api_key, value=valid_token, ttl=600
|
key=api_key,
|
||||||
|
value=valid_token,
|
||||||
|
ttl=UserAPIKeyCacheTTLEnum.key_information_cache.value,
|
||||||
)
|
)
|
||||||
valid_token_dict = _get_pydantic_json_dict(valid_token)
|
valid_token_dict = _get_pydantic_json_dict(valid_token)
|
||||||
valid_token_dict.pop("token", None)
|
valid_token_dict.pop("token", None)
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue