fix: OTEL semantic conventions import does not exist

Ishaan Jaff 2024-06-11 12:28:38 -07:00
parent 842b8337e6
commit 1af3d34d8b
3 changed files with 51 additions and 4 deletions

litellm/integrations/opentelemetry.py

@@ -247,7 +247,7 @@ class OpenTelemetry(CustomLogger):
         span.end(end_time=self._to_ns(end_time))
 
     def set_tools_attributes(self, span: Span, tools):
-        from opentelemetry.semconv.ai import SpanAttributes
+        from litellm.proxy._types import SpanAttributes
         import json
 
         if not tools:
@ -272,7 +272,7 @@ class OpenTelemetry(CustomLogger):
pass
def set_attributes(self, span: Span, kwargs, response_obj):
from opentelemetry.semconv.ai import SpanAttributes
from litellm.proxy._types import SpanAttributes
optional_params = kwargs.get("optional_params", {})
litellm_params = kwargs.get("litellm_params", {}) or {}
@ -407,7 +407,7 @@ class OpenTelemetry(CustomLogger):
)
def set_raw_request_attributes(self, span: Span, kwargs, response_obj):
from opentelemetry.semconv.ai import SpanAttributes
from litellm.proxy._types import SpanAttributes
optional_params = kwargs.get("optional_params", {})
litellm_params = kwargs.get("litellm_params", {}) or {}
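
All three edits above swap the same import. A minimal sketch of how these methods consume the vendored enum after the change (the helper name set_model_attribute is hypothetical, not litellm's actual code):

from opentelemetry.trace import Span


def set_model_attribute(span: Span, kwargs: dict) -> None:
    # The local import mirrors the diff above: litellm.proxy._types ships
    # with litellm itself, unlike opentelemetry.semconv.ai, which is only
    # present if opentelemetry-semantic-conventions-ai is installed.
    from litellm.proxy._types import SpanAttributes

    model = kwargs.get("model")
    if model is not None:
        # SpanAttributes members subclass str, so they are valid keys
        # for Span.set_attribute().
        span.set_attribute(SpanAttributes.LLM_REQUEST_MODEL, model)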

litellm/proxy/_types.py

@@ -1513,3 +1513,51 @@ class SpendLogsPayload(TypedDict):
     request_tags: str # json str
     team_id: Optional[str]
     end_user: Optional[str]
+
+
+class SpanAttributes(str, enum.Enum):
+    # Note: we've taken this from opentelemetry-semantic-conventions-ai,
+    # choosing not to add a new dependency to litellm for it.
+    # These Semantic Conventions for LLM requests should be removed once
+    # the OpenTelemetry Semantic Conventions support Gen AI.
+    # Issue: https://github.com/open-telemetry/opentelemetry-python/issues/3868
+    # Refer to https://github.com/open-telemetry/semantic-conventions/blob/main/docs/gen-ai/llm-spans.md
+    LLM_SYSTEM = "gen_ai.system"
+    LLM_REQUEST_MODEL = "gen_ai.request.model"
+    LLM_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens"
+    LLM_REQUEST_TEMPERATURE = "gen_ai.request.temperature"
+    LLM_REQUEST_TOP_P = "gen_ai.request.top_p"
+    LLM_PROMPTS = "gen_ai.prompt"
+    LLM_COMPLETIONS = "gen_ai.completion"
+    LLM_RESPONSE_MODEL = "gen_ai.response.model"
+    LLM_USAGE_COMPLETION_TOKENS = "gen_ai.usage.completion_tokens"
+    LLM_USAGE_PROMPT_TOKENS = "gen_ai.usage.prompt_tokens"
+    LLM_TOKEN_TYPE = "gen_ai.token.type"
+    # To be added
+    # LLM_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reasons"
+    # LLM_RESPONSE_ID = "gen_ai.response.id"
+
+    # LLM
+    LLM_REQUEST_TYPE = "llm.request.type"
+    LLM_USAGE_TOTAL_TOKENS = "llm.usage.total_tokens"
+    LLM_USAGE_TOKEN_TYPE = "llm.usage.token_type"
+    LLM_USER = "llm.user"
+    LLM_HEADERS = "llm.headers"
+    LLM_TOP_K = "llm.top_k"
+    LLM_IS_STREAMING = "llm.is_streaming"
+    LLM_FREQUENCY_PENALTY = "llm.frequency_penalty"
+    LLM_PRESENCE_PENALTY = "llm.presence_penalty"
+    LLM_CHAT_STOP_SEQUENCES = "llm.chat.stop_sequences"
+    LLM_REQUEST_FUNCTIONS = "llm.request.functions"
+    LLM_REQUEST_REPETITION_PENALTY = "llm.request.repetition_penalty"
+    LLM_RESPONSE_FINISH_REASON = "llm.response.finish_reason"
+    LLM_RESPONSE_STOP_REASON = "llm.response.stop_reason"
+    LLM_CONTENT_COMPLETION_CHUNK = "llm.content.completion.chunk"
+
+    # OpenAI
+    LLM_OPENAI_RESPONSE_SYSTEM_FINGERPRINT = "gen_ai.openai.system_fingerprint"
+    LLM_OPENAI_API_BASE = "gen_ai.openai.api_base"
+    LLM_OPENAI_API_VERSION = "gen_ai.openai.api_version"
+    LLM_OPENAI_API_TYPE = "gen_ai.openai.api_type"
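
Because the class mixes in str, its members compare equal to the raw attribute-key strings, so exported spans keep exactly the same keys as with the dropped package. A small usage check illustrating this (a sketch, not part of the commit):

from litellm.proxy._types import SpanAttributes

assert SpanAttributes.LLM_SYSTEM == "gen_ai.system"      # str equality holds
assert isinstance(SpanAttributes.LLM_TOP_K, str)         # usable as a span key
print(SpanAttributes.LLM_USAGE_PROMPT_TOKENS.value)      # "gen_ai.usage.prompt_tokens"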

requirements.txt

@@ -31,7 +31,6 @@
 azure-identity==1.15.0 # for azure content safety
 opentelemetry-api==1.25.0
 opentelemetry-sdk==1.25.0
 opentelemetry-exporter-otlp==1.25.0
-opentelemetry-semantic-conventions-ai==0.3.1
 ### LITELLM PACKAGE DEPENDENCIES
 python-dotenv==1.0.0 # for env
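
With the extra pin gone, exporting a span needs only the three opentelemetry packages kept above plus litellm's vendored enum. A hedged end-to-end sketch (assumes an OTLP collector listening on the default localhost:4317):

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from litellm.proxy._types import SpanAttributes

# Wire up the SDK with the OTLP exporter pinned in the requirements above.
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
trace.set_tracer_provider(provider)

tracer = trace.get_tracer("litellm-demo")
with tracer.start_as_current_span("litellm_request") as span:
    # The vendored enum supplies the gen_ai.* attribute keys.
    span.set_attribute(SpanAttributes.LLM_REQUEST_MODEL, "gpt-3.5-turbo")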