forked from phoenix/litellm-mirror
Merge pull request #5576 from BerriAI/litellm_set_max_batch_size
[Fix - Otel logger] Set a max queue size of 100 logs for OTEL
This commit is contained in:
commit 00f1d7b1ff
1 changed file with 6 additions and 2 deletions
|
@ -641,7 +641,9 @@ class OpenTelemetry(CustomLogger):
|
|||
return BatchSpanProcessor(
|
||||
OTLPSpanExporterHTTP(
|
||||
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
|
||||
)
|
||||
),
|
||||
max_queue_size=100,
|
||||
max_export_batch_size=100,
|
||||
)
|
||||
elif self.OTEL_EXPORTER == "otlp_grpc":
|
||||
verbose_logger.debug(
|
||||
|
@ -651,7 +653,9 @@ class OpenTelemetry(CustomLogger):
|
|||
return BatchSpanProcessor(
|
||||
OTLPSpanExporterGRPC(
|
||||
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
|
||||
)
|
||||
),
|
||||
max_queue_size=100,
|
||||
max_export_batch_size=100,
|
||||
)
|
||||
else:
|
||||
verbose_logger.debug(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue