forked from phoenix/litellm-mirror
refactor redact_message_input_output_from_custom_logger
This commit is contained in:
parent 715387c3c0
commit b86075ef9a

1 changed file with 52 additions and 37 deletions
@@ -11,6 +11,7 @@ import copy
 from typing import TYPE_CHECKING, Any
 
 import litellm
+from litellm.integrations.custom_logger import CustomLogger
 
 if TYPE_CHECKING:
     from litellm.litellm_core_utils.litellm_logging import (
@@ -22,6 +23,56 @@ else:
     LiteLLMLoggingObject = Any
 
 
+def redact_message_input_output_from_custom_logger(
+    litellm_logging_obj: LiteLLMLoggingObject, result, custom_logger: CustomLogger
+):
+    if (
+        hasattr(custom_logger, "message_logging")
+        and custom_logger.message_logging is not True
+    ):
+        return perform_redaction(litellm_logging_obj, result)
+    return result
+
+
+def perform_redaction(litellm_logging_obj: LiteLLMLoggingObject, result):
+    """
+    Performs the actual redaction on the logging object and result.
+    """
+    # Redact model_call_details
+    litellm_logging_obj.model_call_details["messages"] = [
+        {"role": "user", "content": "redacted-by-litellm"}
+    ]
+    litellm_logging_obj.model_call_details["prompt"] = ""
+    litellm_logging_obj.model_call_details["input"] = ""
+
+    # Redact streaming response
+    if (
+        litellm_logging_obj.stream is True
+        and "complete_streaming_response" in litellm_logging_obj.model_call_details
+    ):
+        _streaming_response = litellm_logging_obj.model_call_details[
+            "complete_streaming_response"
+        ]
+        for choice in _streaming_response.choices:
+            if isinstance(choice, litellm.Choices):
+                choice.message.content = "redacted-by-litellm"
+            elif isinstance(choice, litellm.utils.StreamingChoices):
+                choice.delta.content = "redacted-by-litellm"
+
+    # Redact result
+    if result is not None and isinstance(result, litellm.ModelResponse):
+        _result = copy.deepcopy(result)
+        if hasattr(_result, "choices") and _result.choices is not None:
+            for choice in _result.choices:
+                if isinstance(choice, litellm.Choices):
+                    choice.message.content = "redacted-by-litellm"
+                elif isinstance(choice, litellm.utils.StreamingChoices):
+                    choice.delta.content = "redacted-by-litellm"
+        return _result
+
+    return result
+
+
 def redact_message_input_output_from_logging(
     litellm_logging_obj: LiteLLMLoggingObject, result
 ):
@@ -50,43 +101,7 @@ def redact_message_input_output_from_logging(
     ):
         return result
 
-    # remove messages, prompts, input, response from logging
-    litellm_logging_obj.model_call_details["messages"] = [
-        {"role": "user", "content": "redacted-by-litellm"}
-    ]
-    litellm_logging_obj.model_call_details["prompt"] = ""
-    litellm_logging_obj.model_call_details["input"] = ""
-
-    # response cleaning
-    # ChatCompletion Responses
-    if (
-        litellm_logging_obj.stream is True
-        and "complete_streaming_response" in litellm_logging_obj.model_call_details
-    ):
-        _streaming_response = litellm_logging_obj.model_call_details[
-            "complete_streaming_response"
-        ]
-        for choice in _streaming_response.choices:
-            if isinstance(choice, litellm.Choices):
-                choice.message.content = "redacted-by-litellm"
-            elif isinstance(choice, litellm.utils.StreamingChoices):
-                choice.delta.content = "redacted-by-litellm"
-    else:
-        if result is not None:
-            if isinstance(result, litellm.ModelResponse):
-                # only deep copy litellm.ModelResponse
-                _result = copy.deepcopy(result)
-                if hasattr(_result, "choices") and _result.choices is not None:
-                    for choice in _result.choices:
-                        if isinstance(choice, litellm.Choices):
-                            choice.message.content = "redacted-by-litellm"
-                        elif isinstance(choice, litellm.utils.StreamingChoices):
-                            choice.delta.content = "redacted-by-litellm"
-
-                return _result
-
-    # by default return result
-    return result
+    return perform_redaction(litellm_logging_obj, result)
 
 
 def redact_user_api_key_info(metadata: dict) -> dict:
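For context, a minimal usage sketch of the new helper (not part of the commit). It assumes the file in this diff is litellm/litellm_core_utils/redact_messages.py, and it substitutes a hypothetical stub for the real LiteLLMLoggingObject, since perform_redaction only reads the stream flag and mutates the model_call_details dict:

import litellm
from litellm.integrations.custom_logger import CustomLogger
# assumed module path for the file shown in this diff:
from litellm.litellm_core_utils.redact_messages import (
    redact_message_input_output_from_custom_logger,
)


class StubLoggingObj:
    # hypothetical stand-in for LiteLLMLoggingObject; only the
    # attributes the redaction helper actually touches
    def __init__(self):
        self.stream = False
        self.model_call_details = {
            "messages": [{"role": "user", "content": "my secret prompt"}],
            "prompt": "my secret prompt",
            "input": "my secret prompt",
        }


logging_obj = StubLoggingObj()
response = litellm.ModelResponse()
response.choices[0].message.content = "a sensitive completion"

logger = CustomLogger()
logger.message_logging = False  # anything other than True triggers redaction

redacted = redact_message_input_output_from_custom_logger(
    logging_obj, response, logger
)
print(redacted.choices[0].message.content)         # redacted-by-litellm
print(logging_obj.model_call_details["messages"])  # content scrubbed in place

Note that perform_redaction deep-copies a ModelResponse before redacting, so the caller's response object keeps its original content; only the copy handed to the logger is scrubbed.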