Alex Burgel 2025-04-24 00:57:59 -07:00 committed by GitHub
commit 9a6805edfa
3 changed files with 17 additions and 19 deletions
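The fix appends .value when interpolating SpanAttributes enum members into span attribute keys, so the emitted keys are the semantic-convention strings (gen_ai.prompt.*, gen_ai.completion.*) rather than keys containing the enum member name. A minimal sketch of the difference, assuming SpanAttributes behaves like a plain enum.Enum; the class below is an illustrative stand-in, with its value taken from the updated test expectations:

# Illustrative stand-in for litellm.proxy._types.SpanAttributes (assumption:
# it is a plain enum.Enum, so f-string interpolation renders the member name,
# not its string value).
from enum import Enum

class SpanAttributes(Enum):
    LLM_COMPLETIONS = "gen_ai.completion"

print(f"{SpanAttributes.LLM_COMPLETIONS}.0.role")        # SpanAttributes.LLM_COMPLETIONS.0.role
print(f"{SpanAttributes.LLM_COMPLETIONS.value}.0.role")  # gen_ai.completion.0.role

The updated unit test below pins the literal gen_ai.completion.* key strings instead of rebuilding them from the enum, so a regression back to the un-suffixed form would fail the assertion.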


@@ -417,7 +417,7 @@ class OpenTelemetry(CustomLogger):
if not function:
continue
- prefix = f"{SpanAttributes.LLM_REQUEST_FUNCTIONS}.{i}"
+ prefix = f"{SpanAttributes.LLM_REQUEST_FUNCTIONS.value}.{i}"
self.safe_set_attribute(
span=span,
key=f"{prefix}.name",
@@ -473,7 +473,7 @@ class OpenTelemetry(CustomLogger):
_value = _function.get(key)
if _value:
kv_pairs[
f"{SpanAttributes.LLM_COMPLETIONS}.{idx}.function_call.{key}"
f"{SpanAttributes.LLM_COMPLETIONS.value}.{idx}.function_call.{key}"
] = _value
return kv_pairs
@@ -634,7 +634,7 @@ class OpenTelemetry(CustomLogger):
if prompt.get("role"):
self.safe_set_attribute(
span=span,
key=f"{SpanAttributes.LLM_PROMPTS}.{idx}.role",
key=f"{SpanAttributes.LLM_PROMPTS.value}.{idx}.role",
value=prompt.get("role"),
)
@@ -643,7 +643,7 @@ class OpenTelemetry(CustomLogger):
prompt["content"] = str(prompt.get("content"))
self.safe_set_attribute(
span=span,
key=f"{SpanAttributes.LLM_PROMPTS}.{idx}.content",
key=f"{SpanAttributes.LLM_PROMPTS.value}.{idx}.content",
value=prompt.get("content"),
)
#############################################
@@ -655,14 +655,14 @@ class OpenTelemetry(CustomLogger):
if choice.get("finish_reason"):
self.safe_set_attribute(
span=span,
key=f"{SpanAttributes.LLM_COMPLETIONS}.{idx}.finish_reason",
key=f"{SpanAttributes.LLM_COMPLETIONS.value}.{idx}.finish_reason",
value=choice.get("finish_reason"),
)
if choice.get("message"):
if choice.get("message").get("role"):
self.safe_set_attribute(
span=span,
key=f"{SpanAttributes.LLM_COMPLETIONS}.{idx}.role",
key=f"{SpanAttributes.LLM_COMPLETIONS.value}.{idx}.role",
value=choice.get("message").get("role"),
)
if choice.get("message").get("content"):
@@ -674,7 +674,7 @@ class OpenTelemetry(CustomLogger):
)
self.safe_set_attribute(
span=span,
key=f"{SpanAttributes.LLM_COMPLETIONS}.{idx}.content",
key=f"{SpanAttributes.LLM_COMPLETIONS.value}.{idx}.content",
value=choice.get("message").get("content"),
)


@@ -1,8 +1,6 @@
# What is this?
## Unit tests for opentelemetry integration
- # What is this?
- ## Unit test for presidio pii masking
import sys, os, asyncio, time, random
from datetime import datetime
import traceback
@@ -17,7 +15,7 @@ sys.path.insert(
) # Adds the parent directory to the system path
import pytest
import litellm
- from unittest.mock import patch, MagicMock, AsyncMock
+ from unittest.mock import patch, MagicMock
from base_test import BaseLoggingCallbackTest
from litellm.types.utils import ModelResponse
@@ -26,15 +24,14 @@ class TestOpentelemetryUnitTests(BaseLoggingCallbackTest):
def test_parallel_tool_calls(self, mock_response_obj: ModelResponse):
tool_calls = mock_response_obj.choices[0].message.tool_calls
from litellm.integrations.opentelemetry import OpenTelemetry
- from litellm.proxy._types import SpanAttributes
kv_pair_dict = OpenTelemetry._tool_calls_kv_pair(tool_calls)
assert kv_pair_dict == {
f"{SpanAttributes.LLM_COMPLETIONS}.0.function_call.arguments": '{"city": "New York"}',
f"{SpanAttributes.LLM_COMPLETIONS}.0.function_call.name": "get_weather",
f"{SpanAttributes.LLM_COMPLETIONS}.1.function_call.arguments": '{"city": "New York"}',
f"{SpanAttributes.LLM_COMPLETIONS}.1.function_call.name": "get_news",
"gen_ai.completion.0.function_call.arguments": '{"city": "New York"}',
"gen_ai.completion.0.function_call.name": "get_weather",
"gen_ai.completion.1.function_call.arguments": '{"city": "New York"}',
"gen_ai.completion.1.function_call.name": "get_news",
}
@pytest.mark.asyncio


@@ -1,8 +1,5 @@
- import json
import os
import sys
- from datetime import datetime
- from unittest.mock import AsyncMock
sys.path.insert(
0, os.path.abspath("../..")
@@ -10,7 +7,7 @@ sys.path.insert(
import pytest
import litellm
- from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig, Span
+ from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig
import asyncio
import logging
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
@@ -86,6 +83,10 @@ def validate_litellm_request(span):
"llm.usage.total_tokens",
"gen_ai.usage.completion_tokens",
"gen_ai.usage.prompt_tokens",
"gen_ai.prompt.0.role",
"gen_ai.prompt.0.content",
"gen_ai.completion.0.role",
"gen_ai.completion.0.content",
]
# get the str of all the span attributes