diff --git a/litellm/_service_logger.py b/litellm/_service_logger.py index 5d9ec74056..da0c99aac3 100644 --- a/litellm/_service_logger.py +++ b/litellm/_service_logger.py @@ -97,6 +97,7 @@ class ServiceLogging(CustomLogger): parent_otel_span: Optional[Span] = None, start_time: Optional[Union[datetime, float]] = None, end_time: Optional[Union[float, datetime]] = None, + event_metadata: Optional[dict] = None, ): """ - For counting if the redis, postgres call is unsuccessful @@ -127,12 +128,16 @@ class ServiceLogging(CustomLogger): from litellm.proxy.proxy_server import open_telemetry_logger - if parent_otel_span is not None and open_telemetry_logger is not None: + if not isinstance(error, str): + error = str(error) + if open_telemetry_logger is not None: await open_telemetry_logger.async_service_failure_hook( payload=payload, parent_otel_span=parent_otel_span, start_time=start_time, end_time=end_time, + event_metadata=event_metadata, + error=error, ) async def async_post_call_failure_hook( diff --git a/litellm/integrations/opentelemetry.py b/litellm/integrations/opentelemetry.py index a82380432c..379c418807 100644 --- a/litellm/integrations/opentelemetry.py +++ b/litellm/integrations/opentelemetry.py @@ -153,6 +153,11 @@ class OpenTelemetry(CustomLogger): if event_metadata: for key, value in event_metadata.items(): + if isinstance(value, dict): + try: + value = str(value) + except Exception: + value = "litellm logging error - could_not_json_serialize" service_logging_span.set_attribute(key, value) service_logging_span.set_status(Status(StatusCode.OK)) service_logging_span.end(end_time=_end_time_ns) @@ -160,9 +165,11 @@ class OpenTelemetry(CustomLogger): async def async_service_failure_hook( self, payload: ServiceLoggerPayload, + error: Optional[str] = "", parent_otel_span: Optional[Span] = None, start_time: Optional[Union[datetime, float]] = None, end_time: Optional[Union[float, datetime]] = None, + event_metadata: Optional[dict] = None, ): from datetime import 
datetime @@ -193,6 +200,17 @@ class OpenTelemetry(CustomLogger): service_logging_span.set_attribute( key="service", value=payload.service.value ) + if error: + service_logging_span.set_attribute(key="error", value=error) + if event_metadata: + for key, value in event_metadata.items(): + if isinstance(value, dict): + try: + value = str(value) + except Exception: + value = "litellm logging error - could_not_json_serialize" + service_logging_span.set_attribute(key, value) + service_logging_span.set_status(Status(StatusCode.ERROR)) service_logging_span.end(end_time=_end_time_ns) diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml index 0750a39376..50e8bcd623 100644 --- a/litellm/proxy/proxy_config.yaml +++ b/litellm/proxy/proxy_config.yaml @@ -26,18 +26,6 @@ model_list: model_info: mode: audio_speech -# For /fine_tuning/jobs endpoints -finetune_settings: - - custom_llm_provider: azure - api_base: https://exampleopenaiendpoint-production.up.railway.app - api_key: fake-key - api_version: "2023-03-15-preview" - - custom_llm_provider: openai - api_key: os.environ/OPENAI_API_KEY - - custom_llm_provider: "vertex_ai" - vertex_project: "adroit-crow-413218" - vertex_location: "us-central1" - vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json" # for /files endpoints files_settings: @@ -48,18 +36,11 @@ files_settings: - custom_llm_provider: openai api_key: os.environ/OPENAI_API_KEY -default_vertex_config: - vertex_project: "adroit-crow-413218" - vertex_location: "us-central1" - vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json" - general_settings: master_key: sk-1234 - # Security controls - max_request_size_mb: 100 - # google cloud run maximum repsonses size is 32MB - max_response_size_mb: 10 +litellm_settings: + callbacks: ["otel"] # 👈 KEY CHANGE \ No newline at end of file diff --git a/litellm/proxy/utils.py b/litellm/proxy/utils.py index cc59f6d58a..1f3bf47226 100644 --- 
a/litellm/proxy/utils.py +++ b/litellm/proxy/utils.py @@ -128,6 +128,11 @@ def log_to_opentelemetry(func): duration=0.0, start_time=start_time, end_time=end_time, + event_metadata={ + "function_name": func.__name__, + "function_kwargs": kwargs, + "function_args": args, + }, ) elif ( # in litellm custom callbacks kwargs is passed as arg[0] @@ -167,9 +172,15 @@ def log_to_opentelemetry(func): error=e, service=ServiceTypes.DB, call_type=func.__name__, + parent_otel_span=kwargs.get("parent_otel_span", None), duration=0.0, start_time=start_time, end_time=end_time, + event_metadata={ + "function_name": func.__name__, + "function_kwargs": kwargs, + "function_args": args, + }, ) raise e