Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-28 02:53:30 +00:00
The lint check in the main branch is failing. This fixes the lint check after we moved to ruff in https://github.com/meta-llama/llama-stack/pull/921. We need to move to a `ruff.toml` file as well as fix and ignore some additional checks. Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
54 lines
1.7 KiB
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import asyncio
import logging

from typing import List

from llama_stack.apis.inference import Message

from llama_stack.apis.safety import Safety, SafetyViolation, ViolationLevel

log = logging.getLogger(__name__)


class SafetyException(Exception):  # noqa: N818
    def __init__(self, violation: SafetyViolation):
        self.violation = violation
        super().__init__(violation.user_message)


class ShieldRunnerMixin:
    def __init__(
        self,
        safety_api: Safety,
        input_shields: List[str] = None,
        output_shields: List[str] = None,
    ):
        self.safety_api = safety_api
        self.input_shields = input_shields
        self.output_shields = output_shields

    async def run_multiple_shields(self, messages: List[Message], identifiers: List[str]) -> None:
        responses = await asyncio.gather(
            *[
                self.safety_api.run_shield(
                    shield_id=identifier,
                    messages=messages,
                )
                for identifier in identifiers
            ]
        )
        for identifier, response in zip(identifiers, responses):
            if not response.violation:
                continue

            violation = response.violation
            if violation.violation_level == ViolationLevel.ERROR:
                raise SafetyException(violation)
            elif violation.violation_level == ViolationLevel.WARN:
                log.warning(f"[Warn]{identifier} raised a warning")
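

# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal sketch of how ShieldRunnerMixin might be used: an agent-like class
# mixes it in, runs its input shields over incoming messages before inference,
# and lets an ERROR-level violation surface as a SafetyException. The
# `_ExampleAgent` class name and the "llama_guard" shield identifier are
# assumptions for illustration, not part of this file.


class _ExampleAgent(ShieldRunnerMixin):
    def __init__(self, safety_api: Safety):
        super().__init__(
            safety_api,
            input_shields=["llama_guard"],
            output_shields=["llama_guard"],
        )

    async def handle(self, messages: List[Message]) -> None:
        try:
            # Raises SafetyException if any input shield reports an ERROR-level violation.
            await self.run_multiple_shields(messages, self.input_shields)
        except SafetyException as exc:
            log.error(f"Request blocked by shield: {exc.violation.user_message}")
            return
        # ... run inference here, then screen the generated messages the same way ...
        # await self.run_multiple_shields(generated_messages, self.output_shields)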