diff --git a/litellm/proxy/_new_secret_config.yaml b/litellm/proxy/_new_secret_config.yaml
index eff98ae67..6df2e0825 100644
--- a/litellm/proxy/_new_secret_config.yaml
+++ b/litellm/proxy/_new_secret_config.yaml
@@ -1,4 +1,10 @@
 model_list:
   - model_name: "*"
     litellm_params:
-      model: "*"
\ No newline at end of file
+      model: "*"
+
+litellm_settings:
+  guardrails:
+    - prompt_injection:  # your custom name for guardrail
+        callbacks: [lakera_prompt_injection]  # litellm callbacks to use
+        default_on: true  # will run on all llm requests when true
\ No newline at end of file
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index 7f734482c..1a05c1d2a 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -2,7 +2,7 @@ import json
 import time
 import uuid
 from enum import Enum
-from typing import Dict, List, Literal, Optional, Tuple, Union
+from typing import Any, Dict, List, Literal, Optional, Tuple, Union
 
 from openai._models import BaseModel as OpenAIObject
 from pydantic import ConfigDict, Field, PrivateAttr
@@ -219,7 +219,7 @@ class ChatCompletionDeltaToolCall(OpenAIObject):
 
 
 
 class HiddenParams(OpenAIObject):
-    original_response: Optional[str] = None
+    original_response: Optional[Union[str, Any]] = None
     model_id: Optional[str] = None  # used in Router for individual deployments
     api_base: Optional[str] = None  # returns api base used for making completion call