(fix) logging Auth errors on datadog (#6995)

* fix get_standard_logging_object_payload

* fix async_post_call_failure_hook

* fix post_call_failure_hook

* fix change

* fix _is_proxy_only_error

* fix async_post_call_failure_hook

* fix getting request body

* remove redundant code

* use a well named original function name for auth errors

* fix logging auth fails on DD

* fix using request body

* use helper for _handle_logging_proxy_only_error
Ishaan Jaff 2024-12-02 23:01:21 -08:00 committed by GitHub
parent df3aca66a9
commit 204d83b3d1
6 changed files with 121 additions and 413 deletions
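
At a high level, the change routes proxy-only failures (e.g. an authentication error raised by user_api_key_auth) through post_call_failure_hook with an explicit error_type and route, so they reach CustomLogger integrations such as Datadog and OTEL exactly once. A rough caller-side sketch of that flow; the helper name, the proxy_logging_obj parameter, and the exact call site are illustrative assumptions, not part of this diff:

from litellm.proxy._types import ProxyErrorTypes, UserAPIKeyAuth


async def log_auth_failure(
    proxy_logging_obj,  # the proxy's ProxyLogging instance (assumed to be in scope)
    request_data: dict,
    exc: Exception,
    api_key: str,
    route: str,
):
    # Forward the auth failure so _is_proxy_only_error() classifies it and
    # _handle_logging_proxy_only_error() emits it to the configured loggers.
    await proxy_logging_obj.post_call_failure_hook(
        request_data=request_data,
        original_exception=exc,
        user_api_key_dict=UserAPIKeyAuth(token=api_key),
        error_type=ProxyErrorTypes.auth_error,  # marks this as a proxy-only error
        route=route,  # e.g. "/chat/completions"; used as the original_function name
    )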


@@ -87,6 +87,7 @@ from litellm.proxy.hooks.max_budget_limiter import _PROXY_MaxBudgetLimiter
 from litellm.proxy.hooks.parallel_request_limiter import (
     _PROXY_MaxParallelRequestsHandler,
 )
+from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup
 from litellm.secret_managers.main import str_to_bool
 from litellm.types.integrations.slack_alerting import DEFAULT_ALERT_TYPES
 from litellm.types.utils import CallTypes, LoggedLiteLLMParams
@@ -750,6 +751,8 @@ class ProxyLogging:
         request_data: dict,
         original_exception: Exception,
         user_api_key_dict: UserAPIKeyAuth,
+        error_type: Optional[ProxyErrorTypes] = None,
+        route: Optional[str] = None,
     ):
         """
         Allows users to raise custom exceptions/log when a call fails, without having to deal with parsing Request body.
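
As the rest of the diff shows, this hook ultimately fans out to every callback in litellm.callbacks, so an integration only needs to implement async_post_call_failure_hook to observe these failures. A minimal sketch of such a callback; the exact signature is assumed to mirror the arguments the proxy passes to each callback, and the registration line is illustrative:

import litellm
from litellm.integrations.custom_logger import CustomLogger
from litellm.proxy._types import UserAPIKeyAuth


class AuthFailureLogger(CustomLogger):
    async def async_post_call_failure_hook(
        self,
        request_data: dict,
        original_exception: Exception,
        user_api_key_dict: UserAPIKeyAuth,
    ):
        # Forward the failure to your own sink; the Datadog/OTEL integrations
        # are driven through the same callback loop.
        print(f"proxy call failed for key={user_api_key_dict.token}: {original_exception}")


litellm.callbacks.append(AuthFailureLogger())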
@@ -787,87 +790,16 @@
)
### LOGGING ###
if isinstance(original_exception, HTTPException):
litellm_logging_obj: Optional[Logging] = request_data.get(
"litellm_logging_obj", None
if self._is_proxy_only_error(
original_exception=original_exception, error_type=error_type
):
await self._handle_logging_proxy_only_error(
request_data=request_data,
user_api_key_dict=user_api_key_dict,
route=route,
original_exception=original_exception,
)
if litellm_logging_obj is None:
import uuid
request_data["litellm_call_id"] = str(uuid.uuid4())
litellm_logging_obj, data = litellm.utils.function_setup(
original_function="IGNORE_THIS",
rules_obj=litellm.utils.Rules(),
start_time=datetime.now(),
**request_data,
)
if litellm_logging_obj is not None:
## UPDATE LOGGING INPUT
_optional_params = {}
_litellm_params = {}
litellm_param_keys = LoggedLiteLLMParams.__annotations__.keys()
for k, v in request_data.items():
if k in litellm_param_keys:
_litellm_params[k] = v
elif k != "model" and k != "user":
_optional_params[k] = v
litellm_logging_obj.update_environment_variables(
model=request_data.get("model", ""),
user=request_data.get("user", ""),
optional_params=_optional_params,
litellm_params=_litellm_params,
)
input: Union[list, str, dict] = ""
if "messages" in request_data and isinstance(
request_data["messages"], list
):
input = request_data["messages"]
elif "prompt" in request_data and isinstance(
request_data["prompt"], str
):
input = request_data["prompt"]
elif "input" in request_data and isinstance(
request_data["input"], list
):
input = request_data["input"]
litellm_logging_obj.pre_call(
input=input,
api_key="",
)
# log the custom exception
await litellm_logging_obj.async_failure_handler(
exception=original_exception,
traceback_exception=traceback.format_exc(),
)
threading.Thread(
target=litellm_logging_obj.failure_handler,
args=(
original_exception,
traceback.format_exc(),
),
).start()
await self._run_post_call_failure_hook_custom_loggers(
original_exception=original_exception,
request_data=request_data,
user_api_key_dict=user_api_key_dict,
)
return
async def _run_post_call_failure_hook_custom_loggers(
self,
original_exception: Exception,
request_data: dict,
user_api_key_dict: UserAPIKeyAuth,
):
for callback in litellm.callbacks:
try:
_callback: Optional[CustomLogger] = None
@@ -885,39 +817,113 @@
)
except Exception as e:
raise e
return
async def async_log_proxy_authentication_errors(
def _is_proxy_only_error(
self,
original_exception: Exception,
request: Request,
parent_otel_span: Optional[Any],
api_key: Optional[str],
error_type: Optional[ProxyErrorTypes] = None,
) -> bool:
"""
Return True if the error is a Proxy Only Error
Prevents double logging of LLM API exceptions
e.g. should only return True for:
- Authentication Errors from user_api_key_auth
- HTTPExceptions raised by the proxy (e.g. rate limit errors)
"""
return isinstance(original_exception, HTTPException) or (
error_type == ProxyErrorTypes.auth_error
)
async def _handle_logging_proxy_only_error(
self,
request_data: dict,
user_api_key_dict: UserAPIKeyAuth,
route: Optional[str] = None,
original_exception: Optional[Exception] = None,
):
"""
Handler for Logging Authentication Errors on LiteLLM Proxy
Why not use post_call_failure_hook?
- `post_call_failure_hook` calls `litellm_logging_obj.async_failure_handler`. This led to the Exception being logged twice
Handle logging for proxy only errors by calling `litellm_logging_obj.async_failure_handler`
What does this handler do?
- Logs Authentication Errors (like invalid API Key passed) to CustomLogger compatible classes (OTEL, Datadog etc)
- calls CustomLogger.async_post_call_failure_hook
Is triggered when self._is_proxy_only_error() returns True
"""
litellm_logging_obj: Optional[Logging] = request_data.get(
"litellm_logging_obj", None
)
if litellm_logging_obj is None:
import uuid
user_api_key_dict = UserAPIKeyAuth(
parent_otel_span=parent_otel_span,
token=_hash_token_if_needed(token=api_key or ""),
)
try:
request_data = await request.json()
except json.JSONDecodeError:
# For GET requests or requests without a JSON body
request_data = {}
await self._run_post_call_failure_hook_custom_loggers(
original_exception=original_exception,
request_data=request_data,
user_api_key_dict=user_api_key_dict,
)
pass
request_data["litellm_call_id"] = str(uuid.uuid4())
user_api_key_logged_metadata = (
LiteLLMProxyRequestSetup.get_sanitized_user_information_from_key(
user_api_key_dict=user_api_key_dict
)
)
litellm_logging_obj, data = litellm.utils.function_setup(
original_function=route or "IGNORE_THIS",
rules_obj=litellm.utils.Rules(),
start_time=datetime.now(),
**request_data,
)
if "metadata" not in request_data:
request_data["metadata"] = {}
request_data["metadata"].update(user_api_key_logged_metadata)
if litellm_logging_obj is not None:
## UPDATE LOGGING INPUT
_optional_params = {}
_litellm_params = {}
litellm_param_keys = LoggedLiteLLMParams.__annotations__.keys()
for k, v in request_data.items():
if k in litellm_param_keys:
_litellm_params[k] = v
elif k != "model" and k != "user":
_optional_params[k] = v
litellm_logging_obj.update_environment_variables(
model=request_data.get("model", ""),
user=request_data.get("user", ""),
optional_params=_optional_params,
litellm_params=_litellm_params,
)
input: Union[list, str, dict] = ""
if "messages" in request_data and isinstance(
request_data["messages"], list
):
input = request_data["messages"]
litellm_logging_obj.model_call_details["messages"] = input
litellm_logging_obj.call_type = CallTypes.acompletion.value
elif "prompt" in request_data and isinstance(request_data["prompt"], str):
input = request_data["prompt"]
litellm_logging_obj.model_call_details["prompt"] = input
litellm_logging_obj.call_type = CallTypes.atext_completion.value
elif "input" in request_data and isinstance(request_data["input"], list):
input = request_data["input"]
litellm_logging_obj.model_call_details["input"] = input
litellm_logging_obj.call_type = CallTypes.aembedding.value
litellm_logging_obj.pre_call(
input=input,
api_key="",
)
# log the custom exception
await litellm_logging_obj.async_failure_handler(
exception=original_exception,
traceback_exception=traceback.format_exc(),
)
threading.Thread(
target=litellm_logging_obj.failure_handler,
args=(
original_exception,
traceback.format_exc(),
),
).start()
async def post_call_success_hook(
self,
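
Taken together, _is_proxy_only_error decides whether the proxy itself should log the failure, and _handle_logging_proxy_only_error builds a logging object via litellm.utils.function_setup so the error is recorded once by the configured integrations. A rough sketch of the classification, using invented exception values and assuming proxy_logging_obj is the proxy's ProxyLogging instance:

from fastapi import HTTPException
from litellm.proxy._types import ProxyErrorTypes


def classification_examples(proxy_logging_obj) -> None:
    # Auth failure surfaced by user_api_key_auth -> proxy-only, logged here.
    assert proxy_logging_obj._is_proxy_only_error(
        original_exception=Exception("Authentication Error: invalid API key"),
        error_type=ProxyErrorTypes.auth_error,
    )

    # HTTPException raised by the proxy (e.g. rate limiting) -> proxy-only.
    assert proxy_logging_obj._is_proxy_only_error(
        original_exception=HTTPException(status_code=429, detail="Too many requests"),
    )

    # Failure returned by the LLM API -> not proxy-only; litellm's normal
    # failure handlers already log it, so it is skipped here to avoid double logging.
    assert not proxy_logging_obj._is_proxy_only_error(
        original_exception=Exception("upstream LLM provider error"),
    )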