Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
fix move logic to custom_batch_logger
This commit is contained in:
parent e681619381
commit 368a5fd052
2 changed files with 61 additions and 32 deletions
litellm/integrations/custom_batch_logger.py (Normal file, 53 additions)
@@ -0,0 +1,53 @@
"""
Custom Logger that handles batching logic

Use this if you want your logs to be stored in memory and flushed periodically
"""

import asyncio
import time
from typing import List, Literal, Optional

from litellm._logging import verbose_logger
from litellm.integrations.custom_logger import CustomLogger

DEFAULT_BATCH_SIZE = 512
DEFAULT_FLUSH_INTERVAL_SECONDS = 5


class CustomBatchLogger(CustomLogger):

    def __init__(self, flush_lock: Optional[asyncio.Lock] = None, **kwargs) -> None:
        """
        Args:
            flush_lock (Optional[asyncio.Lock], optional): Lock to use when flushing the queue. Defaults to None. Only used for custom loggers that do batching
        """
        self.log_queue: List = []
        self.flush_interval = DEFAULT_FLUSH_INTERVAL_SECONDS  # 5 seconds by default
        self.batch_size = DEFAULT_BATCH_SIZE
        self.last_flush_time = time.time()
        self.flush_lock = flush_lock

        super().__init__(**kwargs)
        pass

    async def periodic_flush(self):
        while True:
            await asyncio.sleep(self.flush_interval)
            verbose_logger.debug(
                f"CustomLogger periodic flush after {self.flush_interval} seconds"
            )
            await self.flush_queue()

    async def flush_queue(self):
        async with self.flush_lock:
            if self.log_queue:
                verbose_logger.debug(
                    "CustomLogger: Flushing batch of %s events", self.batch_size
                )
                await self.async_send_batch()
                self.log_queue.clear()
                self.last_flush_time = time.time()

    async def async_send_batch(self):
        pass  # override in subclasses to send self.log_queue to the destination
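
The class above only buffers events in memory and drains them; actually sending a batch is left to subclasses via async_send_batch. Below is a minimal, hypothetical usage sketch, not part of this commit: the MyBatchLogger name, the print-based sender, and the task wiring are illustrative assumptions. A subclass passes an asyncio.Lock as flush_lock, implements async_send_batch, and schedules periodic_flush as a background task.

# Hypothetical sketch, assuming the CustomBatchLogger added in this commit.
import asyncio

from litellm.integrations.custom_batch_logger import CustomBatchLogger


class MyBatchLogger(CustomBatchLogger):
    def __init__(self, **kwargs):
        # The lock guards flush_queue() so the periodic task and any manual
        # flush cannot drain the queue concurrently.
        super().__init__(flush_lock=asyncio.Lock(), **kwargs)

    async def async_send_batch(self):
        # Replace with a real sink (HTTP call, file write, etc.).
        print(f"sending {len(self.log_queue)} buffered events")


async def main():
    logger = MyBatchLogger()
    # Run the periodic flush loop in the background.
    flush_task = asyncio.create_task(logger.periodic_flush())

    # Queue a few events; they are drained after flush_interval seconds,
    # or sooner if flush_queue() is called directly.
    for i in range(3):
        logger.log_queue.append({"event": i})
    await logger.flush_queue()

    flush_task.cancel()


if __name__ == "__main__":
    asyncio.run(main())

Note that flush_queue() enters async with self.flush_lock unconditionally, so a batching subclass must supply a lock; the lock is held around the whole drain, which keeps the periodic task and any direct flush_queue() call from clearing the queue at the same time.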