Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
Merge pull request #9329 from BerriAI/litellm_fix_reset_budget_job
[Bug fix] Reset Budget Job
This commit is contained in commit 5400615ce8.
4 changed files with 384 additions and 24 deletions
@@ -2,7 +2,7 @@ import asyncio
 import json
 import time
 from datetime import datetime, timedelta
-from typing import List, Optional, Union
+from typing import List, Literal, Optional, Union
 
 from litellm._logging import verbose_proxy_logger
 from litellm.litellm_core_utils.duration_parser import duration_in_seconds

@@ -318,9 +318,11 @@ class ResetBudgetJob:
     async def _reset_budget_common(
         item: Union[LiteLLM_TeamTable, LiteLLM_UserTable, LiteLLM_VerificationToken],
         current_time: datetime,
-        item_type: str,
-    ) -> Union[LiteLLM_TeamTable, LiteLLM_UserTable, LiteLLM_VerificationToken]:
+        item_type: Literal["key", "team", "user"],
+    ):
         """
+        In-place, updates spend=0, and sets budget_reset_at to current_time + budget_duration
+
         Common logic for resetting budget for a team, user, or key
         """
         try:

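The helper's body is not shown in this hunk; the new docstring says it zeroes spend and pushes budget_reset_at forward by the budget duration, in place. A minimal sketch of that described behavior, using a stand-in dataclass rather than the real LiteLLM table models:

```python
# Sketch only: BudgetedItem stands in for LiteLLM_TeamTable / LiteLLM_UserTable /
# LiteLLM_VerificationToken; the duration is assumed to be pre-parsed to seconds
# (the real module imports duration_in_seconds for strings like "30d").
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import Optional


@dataclass
class BudgetedItem:
    spend: float
    budget_duration_seconds: Optional[int]
    budget_reset_at: Optional[datetime] = None


def reset_budget_in_place(item: BudgetedItem, current_time: datetime) -> None:
    """Zero the spend and move the next reset time forward by the budget duration."""
    item.spend = 0.0
    if item.budget_duration_seconds is not None:
        item.budget_reset_at = current_time + timedelta(seconds=item.budget_duration_seconds)


item = BudgetedItem(spend=12.5, budget_duration_seconds=30 * 24 * 3600)
reset_budget_in_place(item, datetime.now(timezone.utc))
print(item.spend, item.budget_reset_at)  # 0.0 and roughly 30 days from now
```
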
@@ -339,19 +341,25 @@ class ResetBudgetJob:
     async def _reset_budget_for_team(
         team: LiteLLM_TeamTable, current_time: datetime
     ) -> Optional[LiteLLM_TeamTable]:
-        result = await ResetBudgetJob._reset_budget_common(team, current_time, "team")
-        return result if isinstance(result, LiteLLM_TeamTable) else None
+        await ResetBudgetJob._reset_budget_common(
+            item=team, current_time=current_time, item_type="team"
+        )
+        return team
 
     @staticmethod
     async def _reset_budget_for_user(
         user: LiteLLM_UserTable, current_time: datetime
     ) -> Optional[LiteLLM_UserTable]:
-        result = await ResetBudgetJob._reset_budget_common(user, current_time, "user")
-        return result if isinstance(result, LiteLLM_UserTable) else None
+        await ResetBudgetJob._reset_budget_common(
+            item=user, current_time=current_time, item_type="user"
+        )
+        return user
 
     @staticmethod
     async def _reset_budget_for_key(
         key: LiteLLM_VerificationToken, current_time: datetime
     ) -> Optional[LiteLLM_VerificationToken]:
-        result = await ResetBudgetJob._reset_budget_common(key, current_time, "key")
-        return result if isinstance(result, LiteLLM_VerificationToken) else None
+        await ResetBudgetJob._reset_budget_common(
+            item=key, current_time=current_time, item_type="key"
+        )
+        return key

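With the common helper mutating its argument in place, each wrapper now passes keyword arguments and returns the object it was given, instead of isinstance-filtering the result (which could previously turn a successful reset into None). A rough usage sketch of that shape, with a throwaway stand-in object rather than the real LiteLLM_TeamTable:

```python
# Self-contained sketch; SimpleNamespace stands in for the real table model.
import asyncio
from datetime import datetime, timezone
from types import SimpleNamespace
from typing import Literal


async def _reset_common_sketch(
    item, current_time: datetime, item_type: Literal["key", "team", "user"]
) -> None:
    item.spend = 0.0
    item.budget_reset_at = current_time  # the real helper adds the parsed budget_duration


async def _reset_for_team_sketch(team, current_time: datetime):
    await _reset_common_sketch(item=team, current_time=current_time, item_type="team")
    return team  # same object, now reset; no isinstance check that could yield None


team = SimpleNamespace(spend=42.0, budget_reset_at=None)
print(asyncio.run(_reset_for_team_sketch(team, datetime.now(timezone.utc))).spend)  # 0.0
```
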
@@ -32,7 +32,13 @@ from fastapi import HTTPException, status
 import litellm
 import litellm.litellm_core_utils
 import litellm.litellm_core_utils.litellm_logging
-from litellm import EmbeddingResponse, ImageResponse, ModelResponse, Router, ModelResponseStream
+from litellm import (
+    EmbeddingResponse,
+    ImageResponse,
+    ModelResponse,
+    ModelResponseStream,
+    Router,
+)
 from litellm._logging import verbose_proxy_logger
 from litellm._service_logger import ServiceLogging, ServiceTypes
 from litellm.caching.caching import DualCache, RedisCache

@@ -1009,19 +1015,24 @@ class ProxyLogging:
         for callback in litellm.callbacks:
             _callback: Optional[CustomLogger] = None
             if isinstance(callback, str):
-                _callback = litellm.litellm_core_utils.litellm_logging.get_custom_logger_compatible_class(callback)
+                _callback = litellm.litellm_core_utils.litellm_logging.get_custom_logger_compatible_class(
+                    callback
+                )
             else:
                 _callback = callback  # type: ignore
             if _callback is not None and isinstance(_callback, CustomLogger):
-                if not isinstance(_callback, CustomGuardrail) or _callback.should_run_guardrail(
-                    data=request_data, event_type=GuardrailEventHooks.post_call
+                if not isinstance(
+                    _callback, CustomGuardrail
+                ) or _callback.should_run_guardrail(
+                    data=request_data, event_type=GuardrailEventHooks.post_call
                 ):
                     response = _callback.async_post_call_streaming_iterator_hook(
-                        user_api_key_dict=user_api_key_dict, response=response, request_data=request_data
+                        user_api_key_dict=user_api_key_dict,
+                        response=response,
+                        request_data=request_data,
                     )
         return response
 
     async def post_call_streaming_hook(
         self,
         response: str,

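This hunk only re-wraps long lines; the logic it formats is a gate: a callback's streaming post-call hook runs unless the callback is a guardrail that is not registered for the post_call event. A simplified, self-contained sketch of that control flow with placeholder classes (not litellm's actual CustomLogger/CustomGuardrail):

```python
from typing import Iterable, List


class LoggerSketch:
    """Placeholder for a CustomLogger-style callback with a streaming post-call hook."""

    def post_call_stream(self, response: Iterable[str], request_data: dict) -> Iterable[str]:
        return response


class GuardrailSketch(LoggerSketch):
    """Placeholder for a CustomGuardrail-style callback that can opt out per event."""

    def __init__(self, event_hooks: List[str]) -> None:
        self.event_hooks = event_hooks

    def should_run_guardrail(self, data: dict, event_type: str) -> bool:
        return event_type in self.event_hooks


def apply_streaming_hooks(
    callbacks: List[LoggerSketch], response: Iterable[str], request_data: dict
) -> Iterable[str]:
    for cb in callbacks:
        # Run the hook unless this is a guardrail not registered for post_call.
        if not isinstance(cb, GuardrailSketch) or cb.should_run_guardrail(
            data=request_data, event_type="post_call"
        ):
            response = cb.post_call_stream(response, request_data)
    return response


chunks = apply_streaming_hooks(
    [LoggerSketch(), GuardrailSketch(event_hooks=["pre_call"])], iter(["a", "b"]), {}
)
print(list(chunks))  # ['a', 'b']; the guardrail opted out of post_call
```
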
@@ -1733,13 +1744,7 @@ class PrismaClient:
                 verbose_proxy_logger.info("Data Inserted into User Table")
                 return new_user_row
             elif table_name == "team":
-                db_data = self.jsonify_object(data=data)
-                if db_data.get("members_with_roles", None) is not None and isinstance(
-                    db_data["members_with_roles"], list
-                ):
-                    db_data["members_with_roles"] = json.dumps(
-                        db_data["members_with_roles"]
-                    )
+                db_data = self.jsonify_team_object(db_data=data)
                 new_team_row = await self.db.litellm_teamtable.upsert(
                     where={"team_id": data["team_id"]},
                     data={

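The seven inline serialization lines collapse into self.jsonify_team_object(db_data=data). The helper's body is not part of this diff; judging from the code it replaces, it presumably JSON-encodes a list-valued members_with_roles before the Prisma upsert. A guessed sketch of that consolidation:

```python
import json
from typing import Any, Dict


def jsonify_team_object_sketch(db_data: Dict[str, Any]) -> Dict[str, Any]:
    """Approximation only: serialize a list-valued members_with_roles to a JSON
    string so the team row can be written through Prisma."""
    members = db_data.get("members_with_roles", None)
    if members is not None and isinstance(members, list):
        db_data["members_with_roles"] = json.dumps(members)
    return db_data


row = jsonify_team_object_sketch({"team_id": "t1", "members_with_roles": [{"role": "admin"}]})
print(row["members_with_roles"])  # '[{"role": "admin"}]'
```

The same helper also replaces jsonify_object in the team batch-update path in the next hunk, so both write paths serialize team rows the same way.
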
@@ -2010,8 +2015,8 @@ class PrismaClient:
             batcher = self.db.batch_()
             for idx, team in enumerate(data_list):
                 try:
-                    data_json = self.jsonify_object(
-                        data=team.model_dump(exclude_none=True)
+                    data_json = self.jsonify_team_object(
+                        db_data=team.model_dump(exclude_none=True)
                     )
                 except Exception:
                     data_json = self.jsonify_object(