mirror of https://github.com/BerriAI/litellm.git
use inmemory exporter for testing
parent 61c8e9d2ec
commit 0d8a7d5cf0
2 changed files with 13 additions and 6 deletions
litellm/integrations/opentelemetry.py

@@ -1,4 +1,4 @@
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from typing import Optional
 import os
 
@@ -18,6 +18,8 @@ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
     OTLPSpanExporter as OTLPSpanExporterGRPC,
 )
 from opentelemetry.sdk.trace.export import (
+    SpanExporter,
+    SimpleSpanProcessor,
     BatchSpanProcessor,
     ConsoleSpanExporter,
 )
@@ -28,7 +30,7 @@ LITELLM_RESOURCE = {"service.name": "litellm"}
 
 @dataclass
 class OpenTelemetryConfig:
-    exporter: str = field(default="console")
+    exporter: str | SpanExporter = "console"
     endpoint: Optional[str] = None
     bearer_token: Optional[str] = None
 
@@ -106,6 +108,9 @@ class OpenTelemetry(CustomLogger):
         return TraceContextTextMapPropagator().extract(carrier=carrier)
 
     def _get_span_processor(self):
+        if isinstance(self.config.exporter, SpanExporter):
+            return SimpleSpanProcessor(self.config.exporter)
+
         if self.config.exporter == "console":
            return BatchSpanProcessor(ConsoleSpanExporter())
         elif self.config.exporter == "otlp_http":
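With the exporter field now accepting a SpanExporter instance, a test or script can capture spans in memory instead of reading console output. A minimal sketch, not part of the diff: the request that actually produces spans is left as a placeholder, and the callback wiring mirrors the test change below.

from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

import litellm
from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig

# An exporter instance takes the new isinstance() branch in _get_span_processor(),
# which wraps it in a SimpleSpanProcessor (spans are exported synchronously on end).
exporter = InMemorySpanExporter()
litellm.callbacks = [OpenTelemetry(OpenTelemetryConfig(exporter=exporter))]

# ... issue a completion / proxy request that should emit spans ...

spans = exporter.get_finished_spans()  # tuple of ReadableSpan objects
print([span.name for span in spans])
exporter.clear()  # reset between test cases if the exporter is reused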
proxy server tests (second changed file)

@@ -604,11 +604,12 @@ def test_load_router_config(mock_cache, fake_env_vars):
 # test_load_router_config()
 
 from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig
+from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
 
 
 @mock_patch_acompletion()
-def test_otel_with_proxy_server(mock_acompletion, client_no_auth, capsys):
-    litellm.callbacks = [OpenTelemetry(OpenTelemetryConfig(exporter="console"))]
+def test_otel_with_proxy_server(mock_acompletion, client_no_auth):
+    exporter = InMemorySpanExporter()
+    litellm.callbacks = [OpenTelemetry(OpenTelemetryConfig(exporter=exporter))]
 
     data = {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hi"}]}
 
@@ -626,4 +627,5 @@ def test_otel_with_proxy_server(mock_acompletion, client_no_auth, capsys):
     assert response.status_code == 200
     assert response.json() == example_completion_result
 
-    print(capsys.readouterr())
+    spans = exporter.get_finished_spans()
+    assert len(spans) == 0
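A note on the mechanism behind this test change: because the in-memory exporter goes through the new SimpleSpanProcessor branch of _get_span_processor, each span is exported synchronously as it ends, so exporter.get_finished_spans() can be inspected right after the request without an explicit flush or the capsys output-parsing the previous version relied on.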