litellm/tests/local_testing/test_arize_ai.py
import asyncio
import logging
import os
import time
import pytest
from dotenv import load_dotenv
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
import litellm
from litellm._logging import verbose_logger, verbose_proxy_logger
from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig
load_dotenv()
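# NOTE: the Arize integration reads its credentials from the environment
# (ARIZE_SPACE_KEY / ARIZE_API_KEY per the litellm docs); load_dotenv() above
# pulls them from a local .env file when running this test locally.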


@pytest.mark.asyncio()
async def test_async_otel_callback():
    # Enable verbose logging so the Arize/OTEL export path is visible in test output.
    litellm.set_verbose = True
    verbose_proxy_logger.setLevel(logging.DEBUG)
    verbose_logger.setLevel(logging.DEBUG)

    # Route successful completions through the Arize integration.
    litellm.success_callback = ["arize"]

    # mock_response avoids a real OpenAI call; only the callback path is exercised.
    await litellm.acompletion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "hi test from local arize"}],
        mock_response="hello",
        temperature=0.1,
        user="OTEL_USER",
    )

    # Give the async callback time to flush spans before the test exits.
    await asyncio.sleep(2)
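

# A minimal follow-up sketch (not part of the original file): verify that spans are
# actually emitted by wiring the OpenTelemetry callback to the imported
# InMemorySpanExporter. This assumes OpenTelemetryConfig accepts an exporter
# instance and that the logger can be registered via litellm.callbacks; adjust to
# the current litellm API if these assumptions do not hold.
@pytest.mark.asyncio()
async def test_async_otel_callback_with_in_memory_exporter():
    exporter = InMemorySpanExporter()
    otel_logger = OpenTelemetry(config=OpenTelemetryConfig(exporter=exporter))
    litellm.callbacks = [otel_logger]

    await litellm.acompletion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "hi test from local arize"}],
        mock_response="hello",
    )

    # Give the async logger time to export before asserting.
    await asyncio.sleep(2)
    spans = exporter.get_finished_spans()
    assert len(spans) >= 1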