feat(pass_through_endpoint.py): support enforcing key rpm limits on pass through endpoints
Closes https://github.com/BerriAI/litellm/issues/4698
parent 5147b8df13 · commit 1d6643df22
5 changed files with 105 additions and 22 deletions
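What the commit adds: when a request hits a configured pass-through endpoint, the proxy now enforces the calling key's `rpm_limit` the same way it does for regular LLM routes. As a rough illustration only, the sketch below shows the general pattern (bucket requests per hashed key per minute, reject with HTTP 429 once the limit is hit); the function name `enforce_key_rpm_limit`, the in-memory dict standing in for litellm's DualCache, and the bucket key format are assumptions for illustration, not litellm's actual code.

from datetime import datetime
from typing import Optional

from fastapi import HTTPException

# Illustrative in-memory counter; the proxy itself uses its DualCache (in-memory + Redis).
_request_counts: dict = {}


def enforce_key_rpm_limit(api_key_hash: str, rpm_limit: Optional[int]) -> None:
    """Hypothetical helper: raise 429 if this key exceeded its per-minute request limit."""
    if rpm_limit is None:  # key has no RPM limit configured
        return

    # Bucket requests by key + current minute, e.g. "abc123:2024-07-14-18-05".
    minute_bucket = datetime.utcnow().strftime("%Y-%m-%d-%H-%M")
    cache_key = f"{api_key_hash}:{minute_bucket}"

    current = _request_counts.get(cache_key, 0)
    if current >= rpm_limit:
        raise HTTPException(status_code=429, detail="Max RPM limit reached for key")

    _request_counts[cache_key] = current + 1

In the proxy, checks of this kind are handled by the `_PROXY_MaxParallelRequestsHandler` pre-call hook, which the hunk below touches.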
@@ -1,12 +1,16 @@
-from typing import Optional
-import litellm, traceback, sys
-from litellm.caching import DualCache
-from litellm.proxy._types import UserAPIKeyAuth
-from litellm.integrations.custom_logger import CustomLogger
-from fastapi import HTTPException
-from litellm._logging import verbose_proxy_logger
-from litellm import ModelResponse
-from datetime import datetime
+import sys
+import traceback
+from datetime import datetime
+from typing import Optional
+
+from fastapi import HTTPException
+
+import litellm
+from litellm import ModelResponse
+from litellm._logging import verbose_proxy_logger
+from litellm.caching import DualCache
+from litellm.integrations.custom_logger import CustomLogger
+from litellm.proxy._types import UserAPIKeyAuth
 
 
 class _PROXY_MaxParallelRequestsHandler(CustomLogger):
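For completeness, a hedged usage sketch: the RPM limit itself is attached to a virtual key at creation time via the proxy's /key/generate endpoint, and with this change that limit should also gate requests to configured pass-through routes. The URL, master key, and limit below are placeholder values, not part of the commit.

import requests

# Placeholder values: assumes a litellm proxy on localhost:4000 with master key "sk-1234".
resp = requests.post(
    "http://localhost:4000/key/generate",
    headers={"Authorization": "Bearer sk-1234"},
    json={"rpm_limit": 10},  # this key may make at most 10 requests per minute
)
virtual_key = resp.json()["key"]  # usable against normal and pass-through routes alike
print(virtual_key)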