Merge pull request #5059 from BerriAI/litelm_log_otel_args

OTEL - Log DB queries / functions on OTEL

Commit: da848696e3
4 changed files with 37 additions and 22 deletions
@@ -97,6 +97,7 @@ class ServiceLogging(CustomLogger):
         parent_otel_span: Optional[Span] = None,
         start_time: Optional[Union[datetime, float]] = None,
         end_time: Optional[Union[float, datetime]] = None,
+        event_metadata: Optional[dict] = None,
     ):
         """
         - For counting if the redis, postgres call is unsuccessful
@@ -127,12 +128,16 @@ class ServiceLogging(CustomLogger):

         from litellm.proxy.proxy_server import open_telemetry_logger

-        if parent_otel_span is not None and open_telemetry_logger is not None:
+        if not isinstance(error, str):
+            error = str(error)
+        if open_telemetry_logger is not None:
             await open_telemetry_logger.async_service_failure_hook(
                 payload=payload,
                 parent_otel_span=parent_otel_span,
                 start_time=start_time,
                 end_time=end_time,
+                event_metadata=event_metadata,
+                error=error,
             )

     async def async_post_call_failure_hook(
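
The two hunks above thread a new `event_metadata` argument through `ServiceLogging.async_service_failure_hook` and make sure `error` reaches OTEL as a plain string. A minimal standalone sketch of that normalization step, with the OTEL hook call replaced by a print so it runs without the proxy (the function and argument names are illustrative, not litellm's):

import asyncio
from typing import Optional, Union


async def service_failure_hook(
    error: Union[str, Exception],
    event_metadata: Optional[dict] = None,
) -> None:
    # Coerce exceptions (or any non-str error) to a string, mirroring the
    # `if not isinstance(error, str): error = str(error)` guard added above.
    if not isinstance(error, str):
        error = str(error)
    # Stand-in for open_telemetry_logger.async_service_failure_hook(...)
    print({"error": error, "event_metadata": event_metadata or {}})


asyncio.run(
    service_failure_hook(
        TimeoutError("redis call timed out"),
        event_metadata={"function_name": "get_data"},
    )
)
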
@@ -153,6 +153,11 @@ class OpenTelemetry(CustomLogger):

             if event_metadata:
                 for key, value in event_metadata.items():
+                    if isinstance(value, dict):
+                        try:
+                            value = str(value)
+                        except Exception:
+                            value = "litllm logging error - could_not_json_serialize"
                     service_logging_span.set_attribute(key, value)
             service_logging_span.set_status(Status(StatusCode.OK))
             service_logging_span.end(end_time=_end_time_ns)
@@ -160,9 +165,11 @@ class OpenTelemetry(CustomLogger):
     async def async_service_failure_hook(
         self,
         payload: ServiceLoggerPayload,
+        error: Optional[str] = "",
         parent_otel_span: Optional[Span] = None,
         start_time: Optional[Union[datetime, float]] = None,
         end_time: Optional[Union[float, datetime]] = None,
+        event_metadata: Optional[dict] = None,
     ):
         from datetime import datetime

@@ -193,6 +200,17 @@ class OpenTelemetry(CustomLogger):
             service_logging_span.set_attribute(
                 key="service", value=payload.service.value
             )
+
+            if error:
+                service_logging_span.set_attribute(key="error", value=error)
+            if event_metadata:
+                for key, value in event_metadata.items():
+                    if isinstance(value, dict):
+                        try:
+                            value = str(value)
+                        except Exception:
+                            value = "litllm logging error - could_not_json_serialize"
+                    service_logging_span.set_attribute(key, value)
             service_logging_span.set_status(Status(StatusCode.ERROR))
             service_logging_span.end(end_time=_end_time_ns)

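
On the OTEL side, both the success and failure hooks now flatten `event_metadata` onto the service span, stringifying dict values because span attributes only accept primitive types, and the failure hook additionally records the error string. A self-contained sketch of that attribute/status pattern using the OpenTelemetry SDK directly (requires `opentelemetry-sdk`; the span name and metadata values are made up for illustration):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor
from opentelemetry.trace import Status, StatusCode

# Console exporter keeps the example self-contained; a real deployment would
# export to an OTLP collector instead.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)
tracer = trace.get_tracer(__name__)

error = "TimeoutError: redis call timed out"
event_metadata = {
    "function_name": "get_data",
    "function_kwargs": {"table_name": "keys"},  # dict value -> must be stringified
}

with tracer.start_as_current_span("service.db") as span:
    if error:
        span.set_attribute("error", error)
    for key, value in event_metadata.items():
        if isinstance(value, dict):
            # Span attributes only take primitive types, hence str(...)
            try:
                value = str(value)
            except Exception:
                value = "could_not_serialize"
        span.set_attribute(key, value)
    span.set_status(Status(StatusCode.ERROR))
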
@@ -26,18 +26,6 @@ model_list:
     model_info:
       mode: audio_speech

-# For /fine_tuning/jobs endpoints
-finetune_settings:
-  - custom_llm_provider: azure
-    api_base: https://exampleopenaiendpoint-production.up.railway.app
-    api_key: fake-key
-    api_version: "2023-03-15-preview"
-  - custom_llm_provider: openai
-    api_key: os.environ/OPENAI_API_KEY
-  - custom_llm_provider: "vertex_ai"
-    vertex_project: "adroit-crow-413218"
-    vertex_location: "us-central1"
-    vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json"
-
 # for /files endpoints
 files_settings:
@@ -48,18 +36,11 @@ files_settings:
   - custom_llm_provider: openai
     api_key: os.environ/OPENAI_API_KEY

-default_vertex_config:
-  vertex_project: "adroit-crow-413218"
-  vertex_location: "us-central1"
-  vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json"
-
 general_settings:
   master_key: sk-1234

-  # Security controls
-  max_request_size_mb: 100
-  # google cloud run maximum repsonses size is 32MB
-  max_response_size_mb: 10
-
+litellm_settings:
+  callbacks: ["otel"] # 👈 KEY CHANGE
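
In the proxy config, the unrelated `finetune_settings`, `default_vertex_config`, and size-limit entries are dropped; the change that activates the new logging is `callbacks: ["otel"]` under `litellm_settings`. For reference, a one-line sketch of the equivalent switch in the Python SDK, assuming the SDK accepts the same "otel" callback string and that an OTEL exporter endpoint is configured separately via environment variables:

import litellm

# Equivalent of `litellm_settings.callbacks: ["otel"]` in the proxy config.
# Assumption: an OTEL exporter/endpoint is configured via environment variables.
litellm.callbacks = ["otel"]
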
@@ -128,6 +128,11 @@ def log_to_opentelemetry(func):
                     duration=0.0,
                     start_time=start_time,
                     end_time=end_time,
+                    event_metadata={
+                        "function_name": func.__name__,
+                        "function_kwargs": kwargs,
+                        "function_args": args,
+                    },
                 )
             elif (
                 # in litellm custom callbacks kwargs is passed as arg[0]
@@ -167,9 +172,15 @@ def log_to_opentelemetry(func):
                 error=e,
                 service=ServiceTypes.DB,
                 call_type=func.__name__,
+                parent_otel_span=kwargs["parent_otel_span"],
                 duration=0.0,
                 start_time=start_time,
                 end_time=end_time,
+                event_metadata={
+                    "function_name": func.__name__,
+                    "function_kwargs": kwargs,
+                    "function_args": args,
+                },
             )
             raise e

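
Finally, the `log_to_opentelemetry` decorator in the proxy now attaches the wrapped function's name and arguments as `event_metadata` on both the success and failure paths, and forwards `parent_otel_span` on failure. A trimmed-down, hypothetical analogue of that decorator (the names `log_db_call`, `_report`, and `get_data` are illustrative; the real decorator reports to the service logger instead of printing):

import asyncio
import functools
from datetime import datetime


def log_db_call(func):
    """Wrap an async DB helper and report its name/args as event metadata."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        start_time = datetime.now()
        try:
            result = await func(*args, **kwargs)
            _report("success", func, args, kwargs, start_time)
            return result
        except Exception as e:
            _report(f"failure: {e}", func, args, kwargs, start_time)
            raise

    return wrapper


def _report(outcome, func, args, kwargs, start_time):
    # Stand-in for the service logger's success/failure hooks.
    print(
        {
            "outcome": outcome,
            "duration_s": (datetime.now() - start_time).total_seconds(),
            "event_metadata": {
                "function_name": func.__name__,
                "function_kwargs": kwargs,
                "function_args": args,
            },
        }
    )


@log_db_call
async def get_data(table_name: str):
    return {"table": table_name, "rows": 0}


asyncio.run(get_data(table_name="keys"))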