fix import UserAPIKeyCacheTTLEnum

Ishaan Jaff 2024-09-26 18:37:19 -07:00
parent 86ebdc611f
commit fdcab2cafa
3 changed files with 5 additions and 5 deletions

litellm/proxy/_types.py

@@ -1913,3 +1913,7 @@ class TeamInfoResponseObject(TypedDict):
     team_info: LiteLLM_TeamTable
     keys: List
     team_memberships: List[LiteLLM_TeamMembership]
+
+
+class UserAPIKeyCacheTTLEnum(enum.Enum):
+    in_memory_cache_ttl = 60  # 1 min ttl ## configure via `general_settings::user_api_key_cache_ttl: <your-value>`
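For context, the enum value is the default TTL in seconds for the proxy's in-memory user-API-key cache, overridable via general_settings::user_api_key_cache_ttl. Below is a minimal sketch of how such a default-plus-override could be resolved and applied; the resolve_cache_ttl helper and SimpleTTLCache class are illustrative stand-ins, not litellm's actual cache wiring.

import enum
import time


class UserAPIKeyCacheTTLEnum(enum.Enum):
    in_memory_cache_ttl = 60  # default: 1 minute


def resolve_cache_ttl(general_settings: dict) -> float:
    # user_api_key_cache_ttl is the override key named in the diff comment;
    # fall back to the enum default when it is unset.
    return float(
        general_settings.get(
            "user_api_key_cache_ttl",
            UserAPIKeyCacheTTLEnum.in_memory_cache_ttl.value,
        )
    )


class SimpleTTLCache:
    # Illustrative in-memory cache with per-entry expiry (not litellm's).
    def __init__(self, default_ttl: float) -> None:
        self.default_ttl = default_ttl
        self._store: dict = {}

    def set(self, key, value) -> None:
        self._store[key] = (time.monotonic() + self.default_ttl, value)

    def get(self, key):
        entry = self._store.get(key)
        if entry is None:
            return None
        expires_at, value = entry
        if time.monotonic() >= expires_at:
            del self._store[key]  # entry expired; drop it
            return None
        return value


# e.g. an operator overriding the default with a 2-minute TTL:
cache = SimpleTTLCache(default_ttl=resolve_cache_ttl({"user_api_key_cache_ttl": 120}))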

litellm/proxy/proxy_server.py

@@ -382,10 +382,6 @@ if os.getenv("DOCS_FILTERED", "False") == "True" and premium_user:
     app.openapi = custom_openapi  # type: ignore
-
-
-class UserAPIKeyCacheTTLEnum(enum.Enum):
-    in_memory_cache_ttl = 60  # 1 min ttl ## configure via `general_settings::user_api_key_cache_ttl: <your-value>`
 
 
 @app.exception_handler(ProxyException)
 async def openai_exception_handler(request: Request, exc: ProxyException):
     # NOTE: DO NOT MODIFY THIS, its crucial to map to Openai exceptions

litellm/proxy/utils.py

@@ -60,6 +60,7 @@ from litellm.proxy._types import (
     SpendLogsMetadata,
     SpendLogsPayload,
     UserAPIKeyAuth,
+    UserAPIKeyCacheTTLEnum,
 )
 from litellm.proxy.db.create_views import (
     create_missing_views,
@@ -70,7 +71,6 @@ from litellm.proxy.hooks.max_budget_limiter import _PROXY_MaxBudgetLimiter
 from litellm.proxy.hooks.parallel_request_limiter import (
     _PROXY_MaxParallelRequestsHandler,
 )
-from litellm.proxy.proxy_server import UserAPIKeyCacheTTLEnum
 from litellm.types.utils import CallTypes, LoggedLiteLLMParams
 
 if TYPE_CHECKING:
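Net effect of the commit: utils.py had been importing UserAPIKeyCacheTTLEnum from proxy_server.py, while proxy_server.py itself imports from utils.py, which risks a circular import at startup; relocating the enum into _types.py gives both modules a dependency-free leaf module to import from. A minimal sketch of the pattern follows, with the three modules collapsed into one file via comments (illustrative layout, not the full files).

# _types.py -- leaf module: defines shared types and imports nothing
# from the rest of the proxy, so any module may import it safely.
import enum


class UserAPIKeyCacheTTLEnum(enum.Enum):
    in_memory_cache_ttl = 60


# utils.py -- now pulls the enum from the leaf module:
#     from litellm.proxy._types import UserAPIKeyCacheTTLEnum   # no cycle
# instead of the old import, which closed a cycle because
# proxy_server.py in turn imports from utils.py:
#     from litellm.proxy.proxy_server import UserAPIKeyCacheTTLEnum

# proxy_server.py -- imports from both _types.py and utils.py; since
# _types.py depends on neither, the import graph is now acyclic.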