Merge pull request #5059 from BerriAI/litelm_log_otel_args

OTEL - Log DB queries / functions on OTEL
Ishaan Jaff 2024-08-05 20:51:11 -07:00 committed by GitHub
commit da848696e3
4 changed files with 37 additions and 22 deletions


@@ -97,6 +97,7 @@ class ServiceLogging(CustomLogger):
        parent_otel_span: Optional[Span] = None,
        start_time: Optional[Union[datetime, float]] = None,
        end_time: Optional[Union[float, datetime]] = None,
        event_metadata: Optional[dict] = None,
    ):
        """
        - For counting if the redis, postgres call is unsuccessful
@@ -127,12 +128,16 @@ class ServiceLogging(CustomLogger):
        from litellm.proxy.proxy_server import open_telemetry_logger

        if parent_otel_span is not None and open_telemetry_logger is not None:
        if not isinstance(error, str):
            error = str(error)
        if open_telemetry_logger is not None:
            await open_telemetry_logger.async_service_failure_hook(
                payload=payload,
                parent_otel_span=parent_otel_span,
                start_time=start_time,
                end_time=end_time,
                event_metadata=event_metadata,
                error=error,
            )

    async def async_post_call_failure_hook(
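The hunk above only shows the changed lines, so here is a minimal, runnable sketch of the pass-through pattern it implements: coerce a non-string error to str, then forward it together with the optional event_metadata dict to the telemetry logger. DemoServiceLogger and DemoOTELLogger are hypothetical stand-ins for illustration, not LiteLLM classes.

# Minimal sketch of the pass-through pattern above. DemoServiceLogger and
# DemoOTELLogger are hypothetical stand-ins, not LiteLLM classes.
import asyncio
from typing import Optional, Union


class DemoOTELLogger:
    async def async_service_failure_hook(
        self,
        payload: dict,
        error: Optional[str] = "",
        event_metadata: Optional[dict] = None,
    ):
        # The real integration would emit an OTEL span here; this just
        # prints what would be attached to it.
        print("error:", error, "| event_metadata:", event_metadata)


class DemoServiceLogger:
    def __init__(self, otel_logger: Optional[DemoOTELLogger] = None):
        self.otel_logger = otel_logger

    async def async_service_failure_hook(
        self,
        payload: dict,
        error: Union[str, Exception] = "",
        event_metadata: Optional[dict] = None,
    ):
        # Coerce non-string errors so the span attribute is always a str.
        if not isinstance(error, str):
            error = str(error)
        if self.otel_logger is not None:
            await self.otel_logger.async_service_failure_hook(
                payload=payload,
                error=error,
                event_metadata=event_metadata,
            )


asyncio.run(
    DemoServiceLogger(DemoOTELLogger()).async_service_failure_hook(
        payload={"service": "postgres"},
        error=ValueError("connection refused"),
        event_metadata={"function_name": "get_data"},
    )
)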


@@ -153,6 +153,11 @@ class OpenTelemetry(CustomLogger):
            if event_metadata:
                for key, value in event_metadata.items():
                    if isinstance(value, dict):
                        try:
                            value = str(value)
                        except Exception:
                            value = "litellm logging error - could_not_json_serialize"
                    service_logging_span.set_attribute(key, value)
            service_logging_span.set_status(Status(StatusCode.OK))
            service_logging_span.end(end_time=_end_time_ns)
@@ -160,9 +165,11 @@ class OpenTelemetry(CustomLogger):
    async def async_service_failure_hook(
        self,
        payload: ServiceLoggerPayload,
        error: Optional[str] = "",
        parent_otel_span: Optional[Span] = None,
        start_time: Optional[Union[datetime, float]] = None,
        end_time: Optional[Union[float, datetime]] = None,
        event_metadata: Optional[dict] = None,
    ):
        from datetime import datetime
@@ -193,6 +200,17 @@ class OpenTelemetry(CustomLogger):
            service_logging_span.set_attribute(
                key="service", value=payload.service.value
            )
            if error:
                service_logging_span.set_attribute(key="error", value=error)
            if event_metadata:
                for key, value in event_metadata.items():
                    if isinstance(value, dict):
                        try:
                            value = str(value)
                        except Exception:
                            value = "litellm logging error - could_not_json_serialize"
                    service_logging_span.set_attribute(key, value)
            service_logging_span.set_status(Status(StatusCode.ERROR))
            service_logging_span.end(end_time=_end_time_ns)
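OTEL span attributes only accept primitive values (or sequences of them), which is why dict values in event_metadata are stringified before being attached. Below is a minimal, runnable sketch of the same idea, assuming the opentelemetry-api and opentelemetry-sdk packages with a console exporter; the span and attribute names are illustrative, not LiteLLM's.

# Sketch of stringifying dict metadata before attaching it as span attributes.
# Assumes opentelemetry-api and opentelemetry-sdk are installed.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor
from opentelemetry.trace import Status, StatusCode

# Export spans to stdout so the attached attributes are visible.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)
tracer = trace.get_tracer(__name__)


def set_metadata_attributes(span, event_metadata: dict) -> None:
    # Dict values are not valid OTEL attribute values, so stringify them
    # (with a fallback marker if conversion fails).
    for key, value in event_metadata.items():
        if isinstance(value, dict):
            try:
                value = str(value)
            except Exception:
                value = "could_not_serialize"
        span.set_attribute(key, value)


span = tracer.start_span("service: postgres")
set_metadata_attributes(
    span,
    {"function_name": "get_data", "function_kwargs": {"table_name": "users"}},
)
span.set_attribute("error", "connection refused")
span.set_status(Status(StatusCode.ERROR))
span.end()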


@@ -26,18 +26,6 @@ model_list:
    model_info:
      mode: audio_speech

# For /fine_tuning/jobs endpoints
finetune_settings:
  - custom_llm_provider: azure
    api_base: https://exampleopenaiendpoint-production.up.railway.app
    api_key: fake-key
    api_version: "2023-03-15-preview"
  - custom_llm_provider: openai
    api_key: os.environ/OPENAI_API_KEY
  - custom_llm_provider: "vertex_ai"
    vertex_project: "adroit-crow-413218"
    vertex_location: "us-central1"
    vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json"

# for /files endpoints
files_settings:
@@ -48,18 +36,11 @@ files_settings:
  - custom_llm_provider: openai
    api_key: os.environ/OPENAI_API_KEY

default_vertex_config:
  vertex_project: "adroit-crow-413218"
  vertex_location: "us-central1"
  vertex_credentials: "/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json"

general_settings:
  master_key: sk-1234

  # Security controls
  max_request_size_mb: 100
  # google cloud run maximum response size is 32MB
  max_response_size_mb: 10

litellm_settings:
  callbacks: ["otel"] # 👈 KEY CHANGE
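On the proxy, callbacks: ["otel"] is the switch that turns the integration on. For reference, here is a hedged sketch of the SDK-side equivalent, assuming litellm is installed and an OTEL exporter/endpoint is already configured through environment variables for your backend (variable names depend on your setup); mock_response keeps the call local.

# Hedged sketch: SDK-side counterpart of the proxy's `callbacks: ["otel"]`.
# Assumes litellm and the OpenTelemetry packages are installed and an
# exporter is configured via environment variables for your tracing backend.
import litellm

litellm.callbacks = ["otel"]  # route callback events to the OTEL integration

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
    mock_response="hello",  # mocked response, so no real API call is made
)
print(response.choices[0].message.content)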


@@ -128,6 +128,11 @@ def log_to_opentelemetry(func):
                    duration=0.0,
                    start_time=start_time,
                    end_time=end_time,
                    event_metadata={
                        "function_name": func.__name__,
                        "function_kwargs": kwargs,
                        "function_args": args,
                    },
                )
            elif (
                # in litellm custom callbacks kwargs is passed as arg[0]
@@ -167,9 +172,15 @@ def log_to_opentelemetry(func):
                    error=e,
                    service=ServiceTypes.DB,
                    call_type=func.__name__,
                    parent_otel_span=kwargs["parent_otel_span"],
                    duration=0.0,
                    start_time=start_time,
                    end_time=end_time,
                    event_metadata={
                        "function_name": func.__name__,
                        "function_kwargs": kwargs,
                        "function_args": args,
                    },
                )
            raise e
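With these two hunks, decorated DB helpers report their name and arguments on both the success and the failure path. Below is a minimal sketch of that decorator pattern; demo_service_hook, log_db_call, and get_user are made-up names, and this is not LiteLLM's log_to_opentelemetry implementation.

# Minimal sketch of the decorator pattern above; demo_service_hook stands in
# for ServiceLogging's success/failure hooks and is not part of LiteLLM.
import asyncio
import functools


async def demo_service_hook(outcome: str, event_metadata: dict, error=None):
    print(outcome, event_metadata, error if error is not None else "")


def log_db_call(func):
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        # Capture the wrapped function's identity and arguments once, so the
        # same metadata is attached on success and on failure.
        event_metadata = {
            "function_name": func.__name__,
            "function_kwargs": kwargs,
            "function_args": args,
        }
        try:
            result = await func(*args, **kwargs)
            await demo_service_hook("success", event_metadata)
            return result
        except Exception as e:
            await demo_service_hook("failure", event_metadata, error=e)
            raise

    return wrapper


@log_db_call
async def get_user(user_id: str):
    # Pretend DB lookup; raises for unknown ids.
    if user_id == "missing":
        raise KeyError(user_id)
    return {"user_id": user_id}


asyncio.run(get_user(user_id="u-123"))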