Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
Fix wrong import and use space_id instead of space_key for Arize integration
parent f8ac675321
commit 0dfc21e80a
11 changed files with 221 additions and 186 deletions
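The gist of the change: `ArizeLogger.get_arize_config()` now reads `ARIZE_SPACE_ID` alongside `ARIZE_SPACE_KEY`, and the integration sends `space_id` to Arize. Below is a rough, non-authoritative sketch of the resolution the tests in this diff exercise; `ArizeConfigSketch` and `get_arize_config_sketch` are illustrative names, and the field names and defaults are taken from the test assertions, not from the real `ArizeConfig`.

import os
from dataclasses import dataclass
from typing import Optional


# Illustrative sketch only -- not the real ArizeConfig from litellm.integrations.arize.arize.
# Field names mirror what test_get_arize_config asserts.
@dataclass
class ArizeConfigSketch:
    space_key: Optional[str]
    space_id: Optional[str]
    api_key: Optional[str]
    endpoint: str
    protocol: str


def get_arize_config_sketch() -> ArizeConfigSketch:
    # Defaults match the values asserted in test_get_arize_config. The endpoint-override
    # env var exercised by test_get_arize_config_with_endpoints is not visible in this
    # diff, so only the default endpoint is shown here.
    return ArizeConfigSketch(
        space_key=os.getenv("ARIZE_SPACE_KEY"),
        space_id=os.getenv("ARIZE_SPACE_ID"),  # space_id is what the integration now sends
        api_key=os.getenv("ARIZE_API_KEY"),
        endpoint="https://otlp.arize.com/v1",
        protocol="otlp_grpc",
    )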
@@ -1,11 +1,13 @@
import asyncio
import logging

from litellm import Choices
import pytest
from dotenv import load_dotenv

import litellm
from litellm._logging import verbose_logger, verbose_proxy_logger
from litellm.integrations._types.open_inference import SpanAttributes
from litellm.integrations.arize.arize import ArizeConfig, ArizeLogger

load_dotenv()
@@ -32,7 +34,7 @@ async def test_async_otel_callback():

@pytest.fixture
def mock_env_vars(monkeypatch):
    monkeypatch.setenv("ARIZE_SPACE_KEY", "test_space_key")
    monkeypatch.setenv("ARIZE_SPACE_ID", "test_space_id")
    monkeypatch.setenv("ARIZE_API_KEY", "test_api_key")

@@ -42,7 +44,7 @@ def test_get_arize_config(mock_env_vars):
    """
    config = ArizeLogger.get_arize_config()
    assert isinstance(config, ArizeConfig)
    assert config.space_key == "test_space_key"
    assert config.space_id == "test_space_id"
    assert config.api_key == "test_api_key"
    assert config.endpoint == "https://otlp.arize.com/v1"
    assert config.protocol == "otlp_grpc"

@@ -58,3 +60,41 @@ def test_get_arize_config_with_endpoints(mock_env_vars, monkeypatch):
    config = ArizeLogger.get_arize_config()
    assert config.endpoint == "grpc://test.endpoint"
    assert config.protocol == "otlp_grpc"


def test_arize_set_attributes():
    """
    Test setting attributes for Arize
    """
    from unittest.mock import MagicMock
    from litellm.types.utils import ModelResponse

    span = MagicMock()
    kwargs = {
        "role": "user",
        "content": "simple arize test",
        "model": "gpt-4o",
        "messages": [{"role": "user", "content": "basic arize test"}],
        "litellm_params": {"metadata": {"key": "value"}},
        "standard_logging_object": {"model_parameters": {"user": "test_user"}},
    }
    response_obj = ModelResponse(
        usage={"total_tokens": 100, "completion_tokens": 60, "prompt_tokens": 40},
        choices=[Choices(message={"role": "assistant", "content": "response content"})],
    )

    ArizeLogger.set_arize_attributes(span, kwargs, response_obj)

    assert span.set_attribute.call_count == 14
    span.set_attribute.assert_any_call(SpanAttributes.METADATA, str({"key": "value"}))
    span.set_attribute.assert_any_call(SpanAttributes.LLM_MODEL_NAME, "gpt-4o")
    span.set_attribute.assert_any_call(SpanAttributes.OPENINFERENCE_SPAN_KIND, "LLM")
    span.set_attribute.assert_any_call(SpanAttributes.INPUT_VALUE, "basic arize test")
    span.set_attribute.assert_any_call("llm.input_messages.0.message.role", "user")
    span.set_attribute.assert_any_call("llm.input_messages.0.message.content", "basic arize test")
    span.set_attribute.assert_any_call(SpanAttributes.LLM_INVOCATION_PARAMETERS, '{"user": "test_user"}')
    span.set_attribute.assert_any_call(SpanAttributes.USER_ID, "test_user")
    span.set_attribute.assert_any_call(SpanAttributes.OUTPUT_VALUE, "response content")
    span.set_attribute.assert_any_call("llm.output_messages.0.message.role", "assistant")
    span.set_attribute.assert_any_call("llm.output_messages.0.message.content", "response content")
    span.set_attribute.assert_any_call(SpanAttributes.LLM_TOKEN_COUNT_TOTAL, 100)
    span.set_attribute.assert_any_call(SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, 60)
    span.set_attribute.assert_any_call(SpanAttributes.LLM_TOKEN_COUNT_PROMPT, 40)
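For context on the 14 expected `set_attribute` calls: the block below is a minimal, hypothetical sketch of the mapping those assertions imply for a single-message request and a single-choice response. It is not the actual `ArizeLogger.set_arize_attributes` implementation; `sketch_set_arize_attributes` and its attribute access on `ModelResponse` are assumptions.

import json

from litellm.integrations._types.open_inference import SpanAttributes


def sketch_set_arize_attributes(span, kwargs, response_obj):
    # Sketch only: reproduces the attribute names/values asserted in the test above
    # (14 set_attribute calls for one input message and one output choice).
    span.set_attribute(SpanAttributes.METADATA, str(kwargs["litellm_params"]["metadata"]))
    span.set_attribute(SpanAttributes.LLM_MODEL_NAME, kwargs["model"])
    span.set_attribute(SpanAttributes.OPENINFERENCE_SPAN_KIND, "LLM")

    # Input side: last message content plus indexed role/content attributes.
    messages = kwargs["messages"]
    span.set_attribute(SpanAttributes.INPUT_VALUE, messages[-1]["content"])
    for i, msg in enumerate(messages):
        span.set_attribute(f"llm.input_messages.{i}.message.role", msg["role"])
        span.set_attribute(f"llm.input_messages.{i}.message.content", msg["content"])

    # Invocation parameters and user id come from the standard logging object.
    params = kwargs["standard_logging_object"]["model_parameters"]
    span.set_attribute(SpanAttributes.LLM_INVOCATION_PARAMETERS, json.dumps(params))
    span.set_attribute(SpanAttributes.USER_ID, params["user"])

    # Output side: first choice message and token usage (the .message / .usage
    # attribute access on the response object is an assumption in this sketch).
    message = response_obj.choices[0].message
    span.set_attribute(SpanAttributes.OUTPUT_VALUE, message.content)
    span.set_attribute("llm.output_messages.0.message.role", message.role)
    span.set_attribute("llm.output_messages.0.message.content", message.content)
    usage = response_obj.usage
    span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_TOTAL, usage.total_tokens)
    span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, usage.completion_tokens)
    span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_PROMPT, usage.prompt_tokens)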