Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
(fix) initializing OTEL Logging on LiteLLM Proxy - ensure OTEL logger is initialized only once (#7435)
* add otel to _custom_logger_compatible_callbacks_literal
* remove extra code
* fix _get_custom_logger_settings_from_proxy_server
* update unit tests
parent 539f166166
commit 17d5ff2fa4

6 changed files with 45 additions and 45 deletions
@@ -59,6 +59,7 @@ _custom_logger_compatible_callbacks_literal = Literal[
     "dynamic_rate_limiter",
     "langsmith",
     "prometheus",
+    "otel",
     "datadog",
     "datadog_llm_observability",
     "galileo",
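With "otel" in the literal, it can be configured like any other custom-logger-compatible callback. A minimal usage sketch, assuming litellm is installed and an OTEL exporter endpoint is already configured via the usual environment variables; this mirrors the documented callback pattern rather than anything introduced by this commit:

    import litellm

    # "otel" now type-checks against _custom_logger_compatible_callbacks_literal
    litellm.callbacks = ["otel"]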
@@ -114,6 +114,21 @@ class OpenTelemetry(CustomLogger):
 
         # init CustomLogger params
         super().__init__(**kwargs)
+        self._init_otel_logger_on_litellm_proxy()
+
+    def _init_otel_logger_on_litellm_proxy(self):
+        """
+        Initializes OpenTelemetry for litellm proxy server
+
+        - Adds Otel as a service callback
+        - Sets `proxy_server.open_telemetry_logger` to self
+        """
+        from litellm.proxy import proxy_server
+
+        # Add Otel as a service callback
+        if "otel" not in litellm.service_callback:
+            litellm.service_callback.append("otel")
+        setattr(proxy_server, "open_telemetry_logger", self)
 
     def log_success_event(self, kwargs, response_obj, start_time, end_time):
         self._handle_sucess(kwargs, response_obj, start_time, end_time)
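The new `_init_otel_logger_on_litellm_proxy` guards the service-callback registration, so constructing a second OpenTelemetry instance cannot register "otel" twice. A self-contained sketch of that guard pattern, with illustrative names standing in for the litellm globals:

    # Illustrative names, not the litellm globals: "otel" is appended to the
    # service-callback list only when absent, so re-running the init is a no-op.
    service_callback: list = []

    def register_otel_once() -> None:
        if "otel" not in service_callback:
            service_callback.append("otel")

    register_otel_once()
    register_otel_once()
    assert service_callback == ["otel"]  # second call changed nothing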
@@ -2327,8 +2327,11 @@ def _init_custom_logger_compatible_class(  # noqa: PLR0915
         for callback in _in_memory_loggers:
             if isinstance(callback, OpenTelemetry):
                 return callback  # type: ignore
-        otel_logger = OpenTelemetry()
+        otel_logger = OpenTelemetry(
+            **_get_custom_logger_settings_from_proxy_server(
+                callback_name=logging_integration
+            )
+        )
         _in_memory_loggers.append(otel_logger)
         return otel_logger  # type: ignore
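This hunk is where the "initialized only once" guarantee in the commit title lives: `_in_memory_loggers` acts as a cache, so a second request for the OTEL logger returns the existing instance instead of constructing a new one. A sketch with simplified stand-ins, not the litellm implementation itself:

    from typing import List

    class OpenTelemetryStub:
        # stand-in for litellm's OpenTelemetry; settings mirror callback_settings
        def __init__(self, **settings):
            self.settings = settings

    _in_memory_loggers: List[object] = []

    def get_or_create_otel_logger(**settings) -> OpenTelemetryStub:
        for callback in _in_memory_loggers:
            if isinstance(callback, OpenTelemetryStub):
                return callback  # reuse path: no second initialization
        logger = OpenTelemetryStub(**settings)
        _in_memory_loggers.append(logger)
        return logger

    first = get_or_create_otel_logger(message_logging=False)
    second = get_or_create_otel_logger()
    assert first is second  # one logger instance, initialized once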
@@ -2544,6 +2547,23 @@ def get_custom_logger_compatible_class(  # noqa: PLR0915
     return None
 
 
+def _get_custom_logger_settings_from_proxy_server(callback_name: str) -> Dict:
+    """
+    Get the settings for a custom logger from the proxy server config.yaml
+
+    Proxy server config.yaml defines callback_settings as:
+
+    callback_settings:
+        otel:
+            message_logging: False
+    """
+    from litellm.proxy.proxy_server import callback_settings
+
+    if callback_settings:
+        return dict(callback_settings.get(callback_name, {}))
+    return {}
+
+
 def use_custom_pricing_for_model(litellm_params: Optional[dict]) -> bool:
     if litellm_params is None:
         return False
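For the config.yaml shown in the docstring, the lookup behaves as below. A sketch with `callback_settings` as a plain dict (in the proxy it is loaded from config.yaml):

    callback_settings = {"otel": {"message_logging": False}}

    def get_settings(callback_name: str) -> dict:
        # mirrors _get_custom_logger_settings_from_proxy_server's lookup
        if callback_settings:
            return dict(callback_settings.get(callback_name, {}))
        return {}

    assert get_settings("otel") == {"message_logging": False}
    assert get_settings("langsmith") == {}  # unknown callbacks fall back to {}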
@@ -17,7 +17,7 @@ def initialize_callbacks_on_proxy(  # noqa: PLR0915
     litellm_settings: dict,
     callback_specific_params: dict = {},
 ):
-    from litellm.proxy.proxy_server import callback_settings, prisma_client
+    from litellm.proxy.proxy_server import prisma_client
 
     verbose_proxy_logger.debug(
         f"{blue_color_code}initializing callbacks={value} on proxy{reset_color_code}"
@@ -30,22 +30,6 @@ def initialize_callbacks_on_proxy(  # noqa: PLR0915
                 and callback in litellm._known_custom_logger_compatible_callbacks
             ):
                 imported_list.append(callback)
-            elif isinstance(callback, str) and callback == "otel":
-                from litellm.integrations.opentelemetry import OpenTelemetry
-                from litellm.proxy import proxy_server
-
-                _otel_settings = {}
-                if isinstance(callback_settings, dict) and "otel" in callback_settings:
-                    _otel_settings = callback_settings["otel"]
-
-                open_telemetry_logger = OpenTelemetry(**_otel_settings)
-
-                # Add Otel as a service callback
-                if "otel" not in litellm.service_callback:
-                    litellm.service_callback.append("otel")
-
-                imported_list.append(open_telemetry_logger)
-                setattr(proxy_server, "open_telemetry_logger", open_telemetry_logger)
             elif isinstance(callback, str) and callback == "presidio":
                 from litellm.proxy.guardrails.guardrail_hooks.presidio import (
                     _OPTIONAL_PresidioPIIMasking,
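The deleted branch duplicated what `OpenTelemetry.__init__` now does itself; since the first hunk added "otel" to the known custom-logger-compatible callbacks, it rides the generic string path instead. A rough sketch of that dispatch, with illustrative names only:

    # Illustrative dispatch — the real routing lives in initialize_callbacks_on_proxy.
    known_custom_logger_compatible_callbacks = {"langsmith", "prometheus", "otel"}

    def route_callback(callback: str, imported_list: list) -> None:
        if callback in known_custom_logger_compatible_callbacks:
            # "otel" now lands here; no special-case branch required
            imported_list.append(callback)

    imported: list = []
    route_callback("otel", imported)
    assert imported == ["otel"]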
@@ -11,30 +11,9 @@ model_list:
     litellm_params:
       model: bedrock/*
 
-guardrails:
-  - guardrail_name: "bedrock-pre-guard"
-    litellm_params:
-      guardrail: bedrock  # supported values: "aporia", "bedrock", "lakera"
-      mode: "during_call"
-      guardrailIdentifier: ff6ujrregl1q
-      guardrailVersion: "DRAFT"
-
-# for /files endpoints
-# For /fine_tuning/jobs endpoints
-finetune_settings:
-  - custom_llm_provider: azure
-    api_base: os.environ/AZURE_BATCHES_API_BASE
-    api_key: os.environ/AZURE_BATCHES_API_KEY
-    api_version: "2024-05-01-preview"
-  - custom_llm_provider: openai
-    api_key: os.environ/OPENAI_API_KEY
-
-# for /files endpoints
-files_settings:
-  - custom_llm_provider: azure
-    api_base: os.environ/AZURE_BATCHES_API_BASE
-    api_key: os.environ/AZURE_BATCHES_API_KEY
-    api_version: "2024-05-01-preview"
-  - custom_llm_provider: openai
-    api_key: os.environ/OPENAI_API_KEY
+litellm_settings:
+  callbacks: ["otel"]
+
+# callback_settings:
+#   otel:
+#     message_logging: False
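Uncommenting the `callback_settings` block is what feeds `_get_custom_logger_settings_from_proxy_server` above. A sketch of the parsed result, assuming PyYAML is available (the proxy's own config loader may differ):

    import yaml  # assumes PyYAML; illustrative of the parsed shape only

    config = yaml.safe_load(
        """
    litellm_settings:
      callbacks: ["otel"]

    callback_settings:
      otel:
        message_logging: False
    """
    )
    assert config["litellm_settings"]["callbacks"] == ["otel"]
    assert config["callback_settings"]["otel"]["message_logging"] is False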
@@ -65,6 +65,7 @@ callback_class_str_to_classType = {
     "langtrace": OpenTelemetry,
     "mlflow": MlflowLogger,
     "langfuse": LangfusePromptManagement,
+    "otel": OpenTelemetry,
 }
 
 expected_env_vars = {
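An illustrative check in the spirit of the updated test table: the "otel" string should now map to the OpenTelemetry class. A stand-in class is used here so the snippet runs without the litellm/OTEL dependencies; the real test harness differs:

    class OpenTelemetry:  # stand-in so the snippet runs without OTEL deps
        ...

    callback_class_str_to_classType = {
        "langtrace": OpenTelemetry,
        "otel": OpenTelemetry,  # the mapping this hunk adds
    }
    assert callback_class_str_to_classType["otel"] is OpenTelemetry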