forked from phoenix/litellm-mirror

docs update standard logging object

parent 2987b14f3b
commit aa84bcebaf

3 changed files with 19 additions and 78 deletions
@@ -87,7 +87,8 @@ class StandardLoggingPayload(TypedDict):
     saved_cache_cost: Optional[float]
     request_tags: list
     end_user: Optional[str]
-    requester_ip_address: Optional[str]
+    requester_ip_address: Optional[str] # IP address of requester
+    requester_metadata: Optional[dict] # metadata passed in request in the "metadata" field
     messages: Optional[Union[str, list, dict]]
     response: Optional[Union[str, list, dict]]
     model_parameters: dict

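Note (not part of the diff): a minimal sketch of reading the documented fields from a custom callback at runtime. It assumes litellm's CustomLogger interface; the PayloadInspector class name and the fields printed are illustrative only.

# Illustrative sketch: inspecting the standard logging payload in a callback.
from litellm.integrations.custom_logger import CustomLogger


class PayloadInspector(CustomLogger):  # hypothetical callback
    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        payload = kwargs.get("standard_logging_object", None)
        if payload is None:
            return
        # Fields documented in the hunk above.
        print(payload.get("request_tags"), payload.get("requester_ip_address"))
        print(payload.get("requester_metadata"))  # field added by this commit
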
@@ -25,18 +25,6 @@ class RequestKwargs(TypedDict):
     optional_params: Optional[Dict[str, Any]]


-class GCSBucketPayload(TypedDict):
-    request_kwargs: Optional[RequestKwargs]
-    response_obj: Optional[Dict]
-    start_time: str
-    end_time: str
-    response_cost: Optional[float]
-    metadata: Optional[StandardLoggingMetadata]
-    spend_log_metadata: str
-    exception: Optional[str]
-    log_event_type: Optional[str]
-
-
 class GCSBucketLogger(GCSBucketBase):
     def __init__(self, bucket_name: Optional[str] = None) -> None:
         from litellm.proxy.proxy_server import premium_user

@@ -72,10 +60,12 @@ class GCSBucketLogger(GCSBucketBase):
         end_time_str = end_time.strftime("%Y-%m-%d %H:%M:%S")
         headers = await self.construct_request_headers()

-        logging_payload: GCSBucketPayload = await self.get_gcs_payload(
-            kwargs, response_obj, start_time_str, end_time_str
+        logging_payload: Optional[StandardLoggingPayload] = kwargs.get(
+            "standard_logging_object", None
         )
-        logging_payload["log_event_type"] = "successful_api_call"
+
+        if logging_payload is None:
+            raise ValueError("standard_logging_object not found in kwargs")

         json_logged_payload = json.dumps(logging_payload)

@@ -117,10 +107,12 @@ class GCSBucketLogger(GCSBucketBase):
         end_time_str = end_time.strftime("%Y-%m-%d %H:%M:%S")
         headers = await self.construct_request_headers()

-        logging_payload: GCSBucketPayload = await self.get_gcs_payload(
-            kwargs, response_obj, start_time_str, end_time_str
+        logging_payload: Optional[StandardLoggingPayload] = kwargs.get(
+            "standard_logging_object", None
         )
-        logging_payload["log_event_type"] = "failed_api_call"
+
+        if logging_payload is None:
+            raise ValueError("standard_logging_object not found in kwargs")

         _litellm_params = kwargs.get("litellm_params") or {}
         metadata = _litellm_params.get("metadata") or {}

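Note (not part of the diff): both the success and failure handlers now share the same lookup-and-validate pattern. A small helper along these lines captures it; the helper itself and the StandardLoggingPayload import path are assumptions, not code from this commit.

from typing import Any, Dict, Optional

from litellm.types.utils import StandardLoggingPayload  # import path assumed


def get_standard_logging_payload(kwargs: Dict[str, Any]) -> StandardLoggingPayload:
    # Return the prebuilt payload attached by litellm's logging layer,
    # mirroring the check added in both handlers above.
    payload: Optional[StandardLoggingPayload] = kwargs.get("standard_logging_object", None)
    if payload is None:
        raise ValueError("standard_logging_object not found in kwargs")
    return payload
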
@@ -150,59 +142,3 @@ class GCSBucketLogger(GCSBucketBase):
             verbose_logger.debug("GCS Bucket response.text %s", response.text)
         except Exception as e:
             verbose_logger.error("GCS Bucket logging error: %s", str(e))
-
-    async def get_gcs_payload(
-        self, kwargs, response_obj, start_time, end_time
-    ) -> GCSBucketPayload:
-        from litellm.proxy.spend_tracking.spend_tracking_utils import (
-            get_logging_payload,
-        )
-
-        request_kwargs = RequestKwargs(
-            model=kwargs.get("model", None),
-            messages=kwargs.get("messages", None),
-            optional_params=kwargs.get("optional_params", None),
-        )
-        response_dict = {}
-        if response_obj:
-            response_dict = convert_litellm_response_object_to_dict(
-                response_obj=response_obj
-            )
-
-        exception_str = None
-
-        # Handle logging exception attributes
-        if "exception" in kwargs:
-            exception_str = kwargs.get("exception", "")
-            if not isinstance(exception_str, str):
-                exception_str = str(exception_str)
-
-        _spend_log_payload: SpendLogsPayload = get_logging_payload(
-            kwargs=kwargs,
-            response_obj=response_obj,
-            start_time=start_time,
-            end_time=end_time,
-            end_user_id=kwargs.get("end_user_id", None),
-        )
-
-        # Ensure everything in the payload is converted to str
-        payload: Optional[StandardLoggingPayload] = kwargs.get(
-            "standard_logging_object", None
-        )
-
-        if payload is None:
-            raise ValueError("standard_logging_object not found in kwargs")
-
-        gcs_payload: GCSBucketPayload = GCSBucketPayload(
-            request_kwargs=request_kwargs,
-            response_obj=response_dict,
-            start_time=start_time,
-            end_time=end_time,
-            metadata=payload["metadata"],
-            spend_log_metadata=_spend_log_payload.get("metadata", ""),
-            response_cost=payload["response_cost"],
-            exception=exception_str,
-            log_event_type=None,
-        )
-
-        return gcs_payload

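Note (not part of the diff): the deleted helper carried the comment "Ensure everything in the payload is converted to str"; with the prebuilt payload the handlers call json.dumps on it directly. A more defensive variant, shown only as an illustration and not what the committed code does, would pass default=str:

import json


def serialize_payload(payload: dict) -> str:
    # default=str guards against any non-JSON-serializable values
    # (e.g. datetimes) that might appear in the payload.
    return json.dumps(payload, default=str)
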
@@ -19,8 +19,12 @@ model_list:
       model: openai/429
       api_key: fake-key
       api_base: https://exampleopenaiendpoint-production.up.railway.app
-      tags: ["fake"]

 general_settings:
   master_key: sk-1234

+litellm_settings:
+  success_callback: ["gcs_bucket"]
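Note (not part of the diff): this config enables the gcs_bucket success callback on the proxy. For reference, the SDK equivalent would look roughly like the sketch below; the environment variable names and the mock_response flag follow litellm's GCS bucket docs and are assumptions here, not taken from this commit.

import os

import litellm

# Hypothetical bucket name and service-account path, for illustration only.
os.environ["GCS_BUCKET_NAME"] = "my-litellm-logs"
os.environ["GCS_PATH_SERVICE_ACCOUNT"] = "service_account.json"

litellm.success_callback = ["gcs_bucket"]

# mock_response avoids needing a real provider key for this sketch.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
    mock_response="hi",
)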