Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
build(pyproject.toml): add new dev dependencies - for type checking (#9631)
* build(pyproject.toml): add new dev dependencies - for type checking
* build: reformat files to fit black
* ci: reformat to fit black
* ci(test-litellm.yml): make tests run clear
* build(pyproject.toml): add ruff
* fix: fix ruff checks
* build(mypy/): fix mypy linting errors
* fix(hashicorp_secret_manager.py): fix passing cert for tls auth
* build(mypy/): resolve all mypy errors
* test: update test
* fix: fix black formatting
* build(pre-commit-config.yaml): use poetry run black
* fix(proxy_server.py): fix linting error
* fix: fix ruff safe representation error
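For orientation, below is a minimal, hypothetical sketch of the kind of Poetry dev-dependency additions the commit message describes. The tools black, ruff, and mypy are named in the message itself; the group name and version constraints here are assumptions, not the actual contents of the commit's pyproject.toml:

```toml
# Hypothetical sketch only -- the real group name and version pins may differ.
[tool.poetry.group.dev.dependencies]
black = "*"   # formatting ("reformat files to fit black"; pre-commit runs "poetry run black")
ruff = "*"    # linting ("add ruff", "fix ruff checks")
mypy = "*"    # static type checking ("fix mypy linting errors", "resolve all mypy errors")
```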
This commit is contained in:
parent 95e5dfae5a · commit 9b7ebb6a7d
214 changed files with 1553 additions and 1433 deletions
@@ -148,9 +148,9 @@ class lakeraAI_Moderation(CustomGuardrail):
         text = ""
         _json_data: str = ""
         if "messages" in data and isinstance(data["messages"], list):
-            prompt_injection_obj: Optional[GuardrailItem] = (
-                litellm.guardrail_name_config_map.get("prompt_injection")
-            )
+            prompt_injection_obj: Optional[
+                GuardrailItem
+            ] = litellm.guardrail_name_config_map.get("prompt_injection")
             if prompt_injection_obj is not None:
                 enabled_roles = prompt_injection_obj.enabled_roles
             else:
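The hunk only reflows the annotated assignment; behavior is unchanged. As a minimal, self-contained illustration of the pattern being touched (an Optional-annotated dict lookup followed by a None check that mypy can verify), here is a sketch with simplified stand-ins for GuardrailItem and the config map. It is not the real litellm implementation, and the fallback value in the else branch is an assumption:

```python
# Sketch of the Optional[...] lookup + None-check pattern from the hunk above.
# GuardrailItem and guardrail_name_config_map are simplified stand-ins,
# not the actual litellm classes.
from dataclasses import dataclass, field
from typing import Dict, List, Optional


@dataclass
class GuardrailItem:
    guardrail_name: str
    enabled_roles: List[str] = field(default_factory=lambda: ["user"])


guardrail_name_config_map: Dict[str, GuardrailItem] = {
    "prompt_injection": GuardrailItem("prompt_injection", ["user", "system"]),
}

# dict.get() returns Optional[GuardrailItem], so the annotation lets mypy
# require the None check before .enabled_roles is accessed.
prompt_injection_obj: Optional[GuardrailItem] = guardrail_name_config_map.get(
    "prompt_injection"
)
if prompt_injection_obj is not None:
    enabled_roles = prompt_injection_obj.enabled_roles
else:
    enabled_roles = ["user"]  # placeholder fallback; the real default is not shown here

print(enabled_roles)
```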