feat(redact_messages.py): allow removing sensitive key information before passing to logging integrations

Krrish Dholakia 2024-07-22 20:58:02 -07:00
parent 0b9e93d863
commit 548e4f53f8
7 changed files with 65 additions and 4 deletions

View file

@@ -48,6 +48,20 @@ A number of these headers could be useful for troubleshooting, but the
`x-litellm-call-id` is the one that is most useful for tracking a request across
components in your system, including in logging tools.
## Redacting UserAPIKeyInfo
Redact information about the user API key (hashed token, user_id, team_id, etc.) from logs.
Currently supported for Langfuse, OpenTelemetry, Logfire, and ArizeAI logging.
```yaml
litellm_settings:
  callbacks: ["langfuse"]
  redact_user_api_key_info: true
```
This removes any field matching `user_api_key_*` from the metadata passed to the logging integration.
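For illustration, here is roughly what this does to a metadata dict before it reaches the logging integration (the field names and values below are hypothetical examples, not an exhaustive list):

```python
metadata = {
    "user_api_key_hash": "88dc28..",        # hashed token (made-up value)
    "user_api_key_user_id": "user-1234",
    "user_api_key_team_id": "team-5678",
    "requester_ip_address": "127.0.0.1",
}

# with redaction enabled, every key starting with "user_api_key" is dropped,
# so the logger only receives the remaining fields
redacted = {k: v for k, v in metadata.items() if not k.startswith("user_api_key")}
# -> {"requester_ip_address": "127.0.0.1"}
```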
## Logging Proxy Input/Output - Langfuse
We will use the `--config` flag to set `litellm.success_callback = ["langfuse"]`; this will log all successful LLM calls to Langfuse. Make sure to set `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` in your environment.

View file

@@ -76,6 +76,7 @@ post_call_rules: List[Callable] = []
turn_off_message_logging: Optional[bool] = False
log_raw_request_response: bool = False
redact_messages_in_exceptions: Optional[bool] = False
redact_user_api_key_info: Optional[bool] = False
store_audit_logs = False  # Enterprise feature, allow users to see audit logs
## end of callbacks #############
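For SDK usage, the new flag can presumably be toggled directly on the module, mirroring the proxy's `litellm_settings` entry (a minimal sketch):

```python
import litellm

# drop user_api_key_* fields from metadata before it is handed to logging integrations
litellm.redact_user_api_key_info = True
```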

View file

@@ -8,6 +8,7 @@ from packaging.version import Version
import litellm
from litellm._logging import verbose_logger
from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info
class LangFuseLogger:
@@ -382,6 +383,8 @@ class LangFuseLogger:
mask_input = clean_metadata.pop("mask_input", False)
mask_output = clean_metadata.pop("mask_output", False)
clean_metadata = redact_user_api_key_info(metadata=clean_metadata)
if trace_name is None and existing_trace_id is None:
    # just log `litellm-{call_type}` as the trace name
    ## DO NOT SET TRACE_NAME if trace-id set. this can lead to overwriting of past traces.

View file

@@ -1,17 +1,21 @@
#### What this does ####
# On success + failure, log events to Logfire
import dotenv, os
import os
import dotenv
dotenv.load_dotenv()  # Loading env variables using dotenv
import traceback
import uuid
from litellm._logging import print_verbose, verbose_logger
from enum import Enum
from typing import Any, Dict, NamedTuple
from typing_extensions import LiteralString
from litellm._logging import print_verbose, verbose_logger
from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info
class SpanConfig(NamedTuple):
    message_template: LiteralString
@@ -135,6 +139,8 @@ class LogfireLogger:
else:
    clean_metadata[key] = value
clean_metadata = redact_user_api_key_info(metadata=clean_metadata)
# Build the initial payload
payload = {
    "id": id,

View file

@@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Union
import litellm
from litellm._logging import verbose_logger
from litellm.integrations.custom_logger import CustomLogger
from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info
from litellm.types.services import ServiceLoggerPayload
if TYPE_CHECKING:
@@ -315,7 +316,9 @@ class OpenTelemetry(CustomLogger):
#############################################
metadata = litellm_params.get("metadata", {}) or {}
for key, value in metadata.items():
clean_metadata = redact_user_api_key_info(metadata=metadata)
for key, value in clean_metadata.items():
    if self.is_primitive(value):
        span.set_attribute("metadata.{}".format(key), value)

View file

@@ -87,3 +87,33 @@ def redact_message_input_output_from_logging(
    # by default return result
    return result
def redact_user_api_key_info(metadata: dict) -> dict:
    """
    Removes any `user_api_key_*` fields from metadata before it is passed to the logging object, if the flag is set.

    Usage:

    SDK
    ```python
    litellm.redact_user_api_key_info = True
    ```

    PROXY:
    ```yaml
    litellm_settings:
      redact_user_api_key_info: true
    ```
    """
    if litellm.redact_user_api_key_info is not True:
        return metadata

    new_metadata = {}
    for k, v in metadata.items():
        if isinstance(k, str) and k.startswith("user_api_key"):
            pass
        else:
            new_metadata[k] = v

    return new_metadata
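A quick sketch of how the new helper behaves once the flag is set (the metadata values are made up for illustration):

```python
import litellm
from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info

litellm.redact_user_api_key_info = True

metadata = {
    "user_api_key_hash": "abc123",      # hypothetical values
    "user_api_key_team_id": "team-1",
    "trace_id": "run-42",
}

print(redact_user_api_key_info(metadata=metadata))
# {'trace_id': 'run-42'}
```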

View file

@@ -3,3 +3,7 @@ model_list:
    litellm_params:
      model: groq/llama3-groq-70b-8192-tool-use-preview
      api_key: os.environ/GROQ_API_KEY

litellm_settings:
  callbacks: ["logfire"]
  redact_user_api_key_info: true