use common helpers for writing to otel

This commit is contained in:
Ishaan Jaff 2024-07-27 11:40:39 -07:00
parent f71ba63cab
commit 19fb5cc11c
5 changed files with 23 additions and 34 deletions

View file

@@ -56,6 +56,7 @@ class ServiceLogging(CustomLogger):
parent_otel_span: Optional[Span] = None, parent_otel_span: Optional[Span] = None,
start_time: Optional[Union[datetime, float]] = None, start_time: Optional[Union[datetime, float]] = None,
end_time: Optional[Union[datetime, float]] = None, end_time: Optional[Union[datetime, float]] = None,
event_metadata: Optional[dict] = None,
): ):
""" """
- For counting if the redis, postgres call is successful - For counting if the redis, postgres call is successful
@@ -84,6 +85,7 @@ class ServiceLogging(CustomLogger):
parent_otel_span=parent_otel_span, parent_otel_span=parent_otel_span,
start_time=start_time, start_time=start_time,
end_time=end_time, end_time=end_time,
event_metadata=event_metadata,
) )
async def async_service_failure_hook( async def async_service_failure_hook(

View file

@@ -21,7 +21,7 @@ from openai._models import BaseModel as OpenAIObject
import litellm import litellm
from litellm._logging import verbose_logger from litellm._logging import verbose_logger
from litellm.integrations.opentelemetry import _get_parent_otel_span_from_kwargs from litellm.litellm_core_utils.core_helpers import _get_parent_otel_span_from_kwargs
from litellm.types.services import ServiceLoggerPayload, ServiceTypes from litellm.types.services import ServiceLoggerPayload, ServiceTypes

View file

@@ -119,6 +119,7 @@ class OpenTelemetry(CustomLogger):
parent_otel_span: Optional[Span] = None, parent_otel_span: Optional[Span] = None,
start_time: Optional[Union[datetime, float]] = None, start_time: Optional[Union[datetime, float]] = None,
end_time: Optional[Union[datetime, float]] = None, end_time: Optional[Union[datetime, float]] = None,
event_metadata: Optional[dict] = None,
): ):
from datetime import datetime from datetime import datetime
@@ -149,6 +150,10 @@ class OpenTelemetry(CustomLogger):
service_logging_span.set_attribute( service_logging_span.set_attribute(
key="service", value=payload.service.value key="service", value=payload.service.value
) )
if event_metadata:
for key, value in event_metadata.items():
service_logging_span.set_attribute(key, value)
service_logging_span.set_status(Status(StatusCode.OK)) service_logging_span.set_status(Status(StatusCode.OK))
service_logging_span.end(end_time=_end_time_ns) service_logging_span.end(end_time=_end_time_ns)
@@ -703,24 +708,3 @@ class OpenTelemetry(CustomLogger):
management_endpoint_span.set_attribute(f"exception", str(_exception)) management_endpoint_span.set_attribute(f"exception", str(_exception))
management_endpoint_span.set_status(Status(StatusCode.ERROR)) management_endpoint_span.set_status(Status(StatusCode.ERROR))
management_endpoint_span.end(end_time=_end_time_ns) management_endpoint_span.end(end_time=_end_time_ns)
# Helper functions used for OTEL logging
def _get_parent_otel_span_from_kwargs(kwargs: Optional[dict] = None):
try:
if kwargs is None:
return None
litellm_params = kwargs.get("litellm_params")
_metadata = kwargs.get("metadata") or {}
if "litellm_parent_otel_span" in _metadata:
return _metadata["litellm_parent_otel_span"]
elif (
litellm_params is not None
and litellm_params.get("metadata") is not None
and "litellm_parent_otel_span" in litellm_params.get("metadata", {})
):
return litellm_params["metadata"]["litellm_parent_otel_span"]
elif "litellm_parent_otel_span" in kwargs:
return kwargs["litellm_parent_otel_span"]
except:
return None

View file

@@ -108,6 +108,7 @@ from litellm._logging import verbose_proxy_logger, verbose_router_logger
from litellm.caching import DualCache, RedisCache from litellm.caching import DualCache, RedisCache
from litellm.exceptions import RejectedRequestError from litellm.exceptions import RejectedRequestError
from litellm.integrations.slack_alerting import SlackAlerting, SlackAlertingArgs from litellm.integrations.slack_alerting import SlackAlerting, SlackAlertingArgs
from litellm.litellm_core_utils.core_helpers import get_litellm_metadata_from_kwargs
from litellm.llms.custom_httpx.httpx_handler import HTTPHandler from litellm.llms.custom_httpx.httpx_handler import HTTPHandler
from litellm.proxy._types import * from litellm.proxy._types import *
from litellm.proxy.analytics_endpoints.analytics_endpoints import ( from litellm.proxy.analytics_endpoints.analytics_endpoints import (
@@ -672,18 +673,15 @@ async def _PROXY_track_cost_callback(
litellm_params = kwargs.get("litellm_params", {}) or {} litellm_params = kwargs.get("litellm_params", {}) or {}
proxy_server_request = litellm_params.get("proxy_server_request") or {} proxy_server_request = litellm_params.get("proxy_server_request") or {}
end_user_id = proxy_server_request.get("body", {}).get("user", None) end_user_id = proxy_server_request.get("body", {}).get("user", None)
user_id = kwargs["litellm_params"]["metadata"].get("user_api_key_user_id", None) metadata = get_litellm_metadata_from_kwargs(kwargs=kwargs)
team_id = kwargs["litellm_params"]["metadata"].get("user_api_key_team_id", None) user_id = metadata.get("user_api_key_user_id", None)
org_id = kwargs["litellm_params"]["metadata"].get("user_api_key_org_id", None) team_id = metadata.get("user_api_key_team_id", None)
key_alias = kwargs["litellm_params"]["metadata"].get("user_api_key_alias", None) org_id = metadata.get("user_api_key_org_id", None)
end_user_max_budget = kwargs["litellm_params"]["metadata"].get( key_alias = metadata.get("user_api_key_alias", None)
"user_api_end_user_max_budget", None end_user_max_budget = metadata.get("user_api_end_user_max_budget", None)
)
if kwargs.get("response_cost", None) is not None: if kwargs.get("response_cost", None) is not None:
response_cost = kwargs["response_cost"] response_cost = kwargs["response_cost"]
user_api_key = kwargs["litellm_params"]["metadata"].get( user_api_key = metadata.get("user_api_key", None)
"user_api_key", None
)
if kwargs.get("cache_hit", False) == True: if kwargs.get("cache_hit", False) == True:
response_cost = 0.0 response_cost = 0.0

View file

@@ -31,8 +31,11 @@ from litellm._service_logger import ServiceLogging, ServiceTypes
from litellm.caching import DualCache, RedisCache from litellm.caching import DualCache, RedisCache
from litellm.exceptions import RejectedRequestError from litellm.exceptions import RejectedRequestError
from litellm.integrations.custom_logger import CustomLogger from litellm.integrations.custom_logger import CustomLogger
from litellm.integrations.opentelemetry import _get_parent_otel_span_from_kwargs
from litellm.integrations.slack_alerting import SlackAlerting from litellm.integrations.slack_alerting import SlackAlerting
from litellm.litellm_core_utils.core_helpers import (
_get_parent_otel_span_from_kwargs,
get_litellm_metadata_from_kwargs,
)
from litellm.litellm_core_utils.litellm_logging import Logging from litellm.litellm_core_utils.litellm_logging import Logging
from litellm.llms.custom_httpx.httpx_handler import HTTPHandler from litellm.llms.custom_httpx.httpx_handler import HTTPHandler
from litellm.proxy._types import ( from litellm.proxy._types import (
@@ -139,13 +142,15 @@ def log_to_opentelemetry(func):
if parent_otel_span is not None: if parent_otel_span is not None:
from litellm.proxy.proxy_server import proxy_logging_obj from litellm.proxy.proxy_server import proxy_logging_obj
metadata = get_litellm_metadata_from_kwargs(kwargs=passed_kwargs)
await proxy_logging_obj.service_logging_obj.async_service_success_hook( await proxy_logging_obj.service_logging_obj.async_service_success_hook(
service=ServiceTypes.DB, service=ServiceTypes.BATCH_WRITE_TO_DB,
call_type=func.__name__, call_type=func.__name__,
parent_otel_span=parent_otel_span, parent_otel_span=parent_otel_span,
duration=0.0, duration=0.0,
start_time=start_time, start_time=start_time,
end_time=end_time, end_time=end_time,
event_metadata=metadata,
) )
# end of logging to otel # end of logging to otel
return result return result