fix(parallel_request_limiter.py): fix user+team tpm/rpm limit check
Closes https://github.com/BerriAI/litellm/issues/3788
parent 3397acdf00
commit 56fd0c60d1
7 changed files with 157 additions and 532 deletions
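The change consolidates per-user and per-team TPM/RPM enforcement into the parallel request limiter instead of running it as a separate hook. Below is a minimal sketch of what such a combined check can look like, assuming a simple per-minute in-memory counter; the names SimpleCounterCache, _check_limit, and pre_call_check are hypothetical and are not the litellm proxy's actual API.

# A minimal sketch of a combined user + team TPM/RPM pre-call check, assuming an
# in-memory counter keyed per entity and per clock minute. All names here are
# illustrative, not the actual litellm/proxy implementation.
import time
from typing import Dict, Optional


class SimpleCounterCache:
    """Stand-in for the proxy's shared internal usage cache."""

    def __init__(self) -> None:
        self._store: Dict[str, Dict[str, int]] = {}

    def get(self, key: str) -> Optional[Dict[str, int]]:
        return self._store.get(key)

    def set(self, key: str, value: Dict[str, int]) -> None:
        self._store[key] = value


def _check_limit(
    cache: SimpleCounterCache,
    entity_key: str,
    tpm_limit: Optional[int],
    rpm_limit: Optional[int],
    requested_tokens: int,
) -> bool:
    """Return True (and record usage) if the request fits the entity's budget."""
    minute_key = f"{entity_key}:{time.strftime('%H-%M')}"
    usage = cache.get(minute_key) or {"tpm": 0, "rpm": 0}
    if tpm_limit is not None and usage["tpm"] + requested_tokens > tpm_limit:
        return False
    if rpm_limit is not None and usage["rpm"] + 1 > rpm_limit:
        return False
    cache.set(minute_key, {"tpm": usage["tpm"] + requested_tokens, "rpm": usage["rpm"] + 1})
    return True


def pre_call_check(
    cache: SimpleCounterCache,
    user_id: str,
    team_id: Optional[str],
    user_limits: Dict[str, Optional[int]],
    team_limits: Dict[str, Optional[int]],
    requested_tokens: int,
) -> None:
    """Reject the request if either the user or the team budget is exhausted."""
    if not _check_limit(
        cache,
        f"user:{user_id}",
        user_limits.get("tpm_limit"),
        user_limits.get("rpm_limit"),
        requested_tokens,
    ):
        raise RuntimeError("429: user TPM/RPM limit reached")
    if team_id is not None and not _check_limit(
        cache,
        f"team:{team_id}",
        team_limits.get("tpm_limit"),
        team_limits.get("rpm_limit"),
        requested_tokens,
    ):
        raise RuntimeError("429: team TPM/RPM limit reached")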
@@ -35,7 +35,6 @@ from litellm import (
 )
 from litellm.utils import ModelResponseIterator
 from litellm.proxy.hooks.max_budget_limiter import _PROXY_MaxBudgetLimiter
-from litellm.proxy.hooks.tpm_rpm_limiter import _PROXY_MaxTPMRPMLimiter
 from litellm.proxy.hooks.cache_control_check import _PROXY_CacheControlCheck
 from litellm.integrations.custom_logger import CustomLogger
 from litellm.proxy.db.base_client import CustomDB
@@ -81,9 +80,6 @@ class ProxyLogging:
         self.call_details["user_api_key_cache"] = user_api_key_cache
         self.internal_usage_cache = DualCache()
         self.max_parallel_request_limiter = _PROXY_MaxParallelRequestsHandler()
-        self.max_tpm_rpm_limiter = _PROXY_MaxTPMRPMLimiter(
-            internal_cache=self.internal_usage_cache
-        )
         self.max_budget_limiter = _PROXY_MaxBudgetLimiter()
         self.cache_control_check = _PROXY_CacheControlCheck()
         self.alerting: Optional[List] = None
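Taken together, the two hunks record the design decision behind the fix: user- and team-level TPM/RPM limits are checked inside the parallel request limiter against the shared internal_usage_cache, so the dedicated _PROXY_MaxTPMRPMLimiter hook is dropped from both the imports and the ProxyLogging constructor. As a usage note on the hypothetical sketch above (same illustrative names, not the litellm API), a request goes through only when both the user and the team budget have room:

# Illustrative use of the sketch above; assumes both calls land in the same clock minute.
cache = SimpleCounterCache()
user_limits = {"tpm_limit": 1000, "rpm_limit": 2}
team_limits = {"tpm_limit": 5000, "rpm_limit": 1}

# First request: fits both the user and the team budget.
pre_call_check(cache, "user-1", "team-a", user_limits, team_limits, requested_tokens=100)

# Second request: the user still has RPM budget, but the team's rpm_limit of 1 is spent.
try:
    pre_call_check(cache, "user-1", "team-a", user_limits, team_limits, requested_tokens=100)
except RuntimeError as exc:
    print(exc)  # 429: team TPM/RPM limit reached
# Note: in this naive sketch the user counter was already incremented before the
# team check failed; a real limiter would verify both budgets before recording usage.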