[Feat - Perf Improvement] DataDog Logger 91% lower latency (#5687)

* fix refactor dd to be an instance of custom logger

* migrate dd logger to be async

* clean up dd logging

* add datadog sync and async code

* use batching for datadog logger

* add doc string for dd logging

* add clear doc string

* fix doc string

* allow debugging intake url

* clean up requirements.txt

* allow setting custom batch size on logger

* fix dd logging to use compression

* fix linting

* add dd load test

* fix dd load test

* fix dd url

* add test_datadog_logging_http_request

* fix test_datadog_logging_http_request
This commit is contained in:
Ishaan Jaff 2024-09-13 17:39:17 -07:00 committed by GitHub
parent cd8d7ca915
commit 741c8e8a45
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 622 additions and 199 deletions

View file

@ -17,14 +17,19 @@ DEFAULT_FLUSH_INTERVAL_SECONDS = 5
class CustomBatchLogger(CustomLogger):
def __init__(self, flush_lock: Optional[asyncio.Lock] = None, **kwargs) -> None:
def __init__(
self,
flush_lock: Optional[asyncio.Lock] = None,
batch_size: Optional[int] = DEFAULT_BATCH_SIZE,
**kwargs,
) -> None:
"""
Args:
flush_lock (Optional[asyncio.Lock], optional): Lock to use when flushing the queue. Defaults to None. Only used for custom loggers that do batching
batch_size (Optional[int], optional): Number of events to buffer before flushing. Defaults to DEFAULT_BATCH_SIZE. Only used for custom loggers that do batching
"""
self.log_queue: List = []
self.flush_interval = DEFAULT_FLUSH_INTERVAL_SECONDS  # 5 seconds
self.batch_size = DEFAULT_BATCH_SIZE
self.batch_size: int = batch_size or DEFAULT_BATCH_SIZE
self.last_flush_time = time.time()
self.flush_lock = flush_lock
@ -43,7 +48,7 @@ class CustomBatchLogger(CustomLogger):
async with self.flush_lock:
if self.log_queue:
verbose_logger.debug(
"CustomLogger: Flushing batch of %s events", self.batch_size
"CustomLogger: Flushing batch of %s events", len(self.log_queue)
)
await self.async_send_batch()
self.log_queue.clear()