Merge pull request #5576 from BerriAI/litellm_set_max_batch_size

[Fix - Otel logger] Set a max queue size of 100 logs for OTEL
This commit is contained in:
Ishaan Jaff 2024-09-09 17:39:16 -07:00 committed by GitHub
commit 00f1d7b1ff
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -641,7 +641,9 @@ class OpenTelemetry(CustomLogger):
return BatchSpanProcessor(
OTLPSpanExporterHTTP(
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
)
),
max_queue_size=100,
max_export_batch_size=100,
)
elif self.OTEL_EXPORTER == "otlp_grpc":
verbose_logger.debug(
@@ -651,7 +653,9 @@ class OpenTelemetry(CustomLogger):
return BatchSpanProcessor(
OTLPSpanExporterGRPC(
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
)
),
max_queue_size=100,
max_export_batch_size=100,
)
else:
verbose_logger.debug(