From e587d3205831ba9cb474aa7eb62499e6be44c8b6 Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Wed, 17 Jul 2024 15:39:31 -0700 Subject: [PATCH] docs(scrub_data.md): add scrub logging data tutorial to docs --- .../docs/observability/scrub_data.md | 97 +++++++++++++++++++ docs/my-website/sidebars.js | 3 +- 2 files changed, 99 insertions(+), 1 deletion(-) create mode 100644 docs/my-website/docs/observability/scrub_data.md diff --git a/docs/my-website/docs/observability/scrub_data.md b/docs/my-website/docs/observability/scrub_data.md new file mode 100644 index 000000000..f8bb4d556 --- /dev/null +++ b/docs/my-website/docs/observability/scrub_data.md @@ -0,0 +1,97 @@ +# Scrub Logged Data + +Redact messages / mask PII before sending data to logging integrations (langfuse/etc.). + +See our [**Presidio PII Masking**](https://github.com/BerriAI/litellm/blob/a176feeacc5fdf504747978d82056eb84679c4be/litellm/proxy/hooks/presidio_pii_masking.py#L286) for reference. + +1. Setup a custom callback + +```python +from litellm.integrations.custom_logger import CustomLogger + +class MyCustomHandler(CustomLogger): + async def async_logging_hook( + self, kwargs: dict, result: Any, call_type: str + ) -> Tuple[dict, Any]: + """ + For masking logged request/response. Return a modified version of the request/result. + + Called before `async_log_success_event`. + """ + if ( + call_type == "completion" or call_type == "acompletion" + ): # /chat/completions requests + messages: Optional[List] = kwargs.get("messages", None) + + kwargs["messages"] = [{"role": "user", "content": "MASK_THIS_ASYNC_VALUE"}] + + return kwargs, result + + def logging_hook( + self, kwargs: dict, result: Any, call_type: str + ) -> Tuple[dict, Any]: + """ + For masking logged request/response. Return a modified version of the request/result. + + Called before `log_success_event`. 
+ """ + if ( + call_type == "completion" or call_type == "acompletion" + ): # /chat/completions requests + messages: Optional[List] = kwargs.get("messages", None) + + kwargs["messages"] = [{"role": "user", "content": "MASK_THIS_SYNC_VALUE"}] + + return kwargs, result + + +customHandler = MyCustomHandler() +``` + + +2. Connect custom handler to LiteLLM + +```python +import litellm + +litellm.callbacks = [customHandler] +``` + +3. Test it! + +```python +# pip install langfuse + +import os +import litellm +from litellm import completion, acompletion + +os.environ["LANGFUSE_PUBLIC_KEY"] = "" +os.environ["LANGFUSE_SECRET_KEY"] = "" +# Optional, defaults to https://cloud.langfuse.com
os.environ["LANGFUSE_HOST"] # optional +# LLM API Keys +os.environ['OPENAI_API_KEY']="" +
litellm.callbacks = [customHandler] +litellm.success_callback = ["langfuse"] + + + +## sync +response = completion(model="gpt-3.5-turbo", messages=[{ "role": "user", "content": "Hi 👋 - i'm openai"}], + stream=True) +for chunk in response: + continue + + +## async +import asyncio + +async def main(): + response = await acompletion(model="gpt-3.5-turbo", messages=[{ "role": "user", "content": "Hi 👋 - i'm openai"}], + stream=True) + async for chunk in response: + continue +asyncio.run(main()) +``` \ No newline at end of file diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 094fc7aec..d2179cafc 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -188,12 +188,13 @@ const sidebars = { type: "category", label: "Logging & Observability", items: [ - "observability/helicone_integration", "observability/langfuse_integration", "observability/logfire_integration", "debugging/local_debugging", "observability/raw_request_response", "observability/custom_callback", + "observability/scrub_data", + "observability/helicone_integration", "observability/sentry", "observability/lago", "observability/openmeter",