chore(telemetry): more cleanup: remove apis.telemetry

# What does this PR do?

Follow-up telemetry cleanup: removes the `apis.telemetry` module and updates the telemetry integration tests to clear the mock OTLP collector before issuing requests.

## Test Plan

Updated the telemetry integration tests shown in the diffs below; both clear the mock OTLP collector before making a chat completion request and then validate the recorded spans and metrics.
Author: Eric Huang
Committed: 2025-10-27 15:56:13 -07:00
Parent: 1c9a31d8bd
Commit: 11b076810f

9 changed files with 409 additions and 451 deletions


```diff
@@ -47,6 +47,9 @@ class TestCollector:
         self.span_exporter.clear()
         self.metric_reader.get_metrics_data()
 
+    def clear_spans(self) -> None:
+        self.span_exporter.clear()
+
 @pytest.fixture(scope="session")
 def _telemetry_providers():
```
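
For orientation, here is a minimal sketch of what a collector and fixture like the ones touched above could look like if they are built on OpenTelemetry's in-memory span exporter and metric reader. The `TestCollector`, `clear_spans`, `span_exporter`, and `_telemetry_providers` names come from the hunk; the constructor, the `clear` method, and the provider wiring are assumptions, not code from this PR.

```python
# Hypothetical reconstruction of the test collector; only the names visible in
# the diff hunk above are taken from the PR, the rest is assumed.
import pytest
from opentelemetry import metrics, trace
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import InMemoryMetricReader
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter


class TestCollector:
    """Buffers spans and metrics in memory so tests can assert on them."""

    def __init__(self) -> None:
        self.span_exporter = InMemorySpanExporter()
        self.metric_reader = InMemoryMetricReader()

    def clear(self) -> None:
        # Drop buffered spans and flush pending metrics between tests.
        self.span_exporter.clear()
        self.metric_reader.get_metrics_data()

    def clear_spans(self) -> None:
        # Narrower reset for tests that only care about span state.
        self.span_exporter.clear()


@pytest.fixture(scope="session")
def _telemetry_providers():
    # Wire the in-memory exporter/reader into global providers once per session.
    collector = TestCollector()
    tracer_provider = TracerProvider()
    tracer_provider.add_span_processor(SimpleSpanProcessor(collector.span_exporter))
    trace.set_tracer_provider(tracer_provider)
    metrics.set_meter_provider(MeterProvider(metric_readers=[collector.metric_reader]))
    yield collector
```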


```diff
@@ -19,6 +19,7 @@ pytestmark = pytest.mark.skipif(
 def test_streaming_chunk_count(mock_otlp_collector, llama_stack_client, text_model_id):
     """Verify streaming adds chunk_count and __type__=async_generator."""
+    mock_otlp_collector.clear()
     stream = llama_stack_client.chat.completions.create(
         model=text_model_id,
@@ -46,6 +47,8 @@ def test_streaming_chunk_count(mock_otlp_collector, llama_stack_client, text_model_id):
 def test_telemetry_format_completeness(mock_otlp_collector, llama_stack_client, text_model_id):
     """Comprehensive validation of telemetry data format including spans and metrics."""
+    mock_otlp_collector.clear()
+
     response = llama_stack_client.chat.completions.create(
         model=text_model_id,
         messages=[{"role": "user", "content": "Test trace openai with temperature 0.7"}],
```
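
To make the intent of these tests concrete, here is a rough sketch of how `test_streaming_chunk_count` could continue past the truncated call above. The `chunk_count` and `__type__=async_generator` attributes come from the docstring; the way the collector exposes finished spans (`span_exporter.get_finished_spans()`), the message content, and the attribute value types are assumptions.

```python
# Hypothetical continuation of test_streaming_chunk_count; treating the
# collector's span_exporter as an OpenTelemetry InMemorySpanExporter is an
# assumption based on the conftest hunk above.
def test_streaming_chunk_count(mock_otlp_collector, llama_stack_client, text_model_id):
    """Verify streaming adds chunk_count and __type__=async_generator."""
    mock_otlp_collector.clear()
    stream = llama_stack_client.chat.completions.create(
        model=text_model_id,
        messages=[{"role": "user", "content": "Say hello"}],  # assumed prompt
        stream=True,
    )

    # Drain the stream so the request span is finished and exported.
    chunks = list(stream)
    assert len(chunks) > 0

    spans = mock_otlp_collector.span_exporter.get_finished_spans()
    generator_spans = [s for s in spans if s.attributes.get("__type__") == "async_generator"]
    assert generator_spans, "expected a span tagged as an async generator"

    # The diff does not show the attribute's type, so compare as ints.
    chunk_count = generator_spans[0].attributes.get("chunk_count")
    assert chunk_count is not None
    assert int(chunk_count) == len(chunks)
```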