mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
fix otel max batch size
This commit is contained in:
parent
e0ac27c00c
commit
dcb50243e7
1 changed file with 4 additions and 2 deletions
|
@ -641,7 +641,8 @@ class OpenTelemetry(CustomLogger):
|
|||
return BatchSpanProcessor(
|
||||
OTLPSpanExporterHTTP(
|
||||
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
|
||||
)
|
||||
),
|
||||
max_queue_size=100,
|
||||
)
|
||||
elif self.OTEL_EXPORTER == "otlp_grpc":
|
||||
verbose_logger.debug(
|
||||
|
@ -651,7 +652,8 @@ class OpenTelemetry(CustomLogger):
|
|||
return BatchSpanProcessor(
|
||||
OTLPSpanExporterGRPC(
|
||||
endpoint=self.OTEL_ENDPOINT, headers=_split_otel_headers
|
||||
)
|
||||
),
|
||||
max_export_batch_size=100,
|
||||
)
|
||||
else:
|
||||
verbose_logger.debug(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue