from codeshield.cs import CodeShield
from termcolor import cprint

from .base import ShieldResponse, TextShield
from llama_toolchain.safety.api.datatypes import *  # noqa: F403


class CodeScannerShield(TextShield):
    """Scans text for insecure code patterns using CodeShield and reports violations."""

    def get_shield_type(self) -> ShieldType:
        return BuiltinShield.code_scanner_guard

    async def run_impl(self, text: str) -> ShieldResponse:
        # Log a truncated preview (first 50 characters) of the text being scanned.
        cprint(f"Running CodeScannerShield on {text[:50]}", color="magenta")
        result = await CodeShield.scan_code(text)
        if result.is_insecure:
            return ShieldResponse(
                shield_type=BuiltinShield.code_scanner_guard,
                is_violation=True,
                violation_type=",".join(
                    [issue.pattern_id for issue in result.issues_found]
                ),
                violation_return_message="Sorry, I found security concerns in the code.",
            )
        else:
            return ShieldResponse(
                shield_type=BuiltinShield.code_scanner_guard, is_violation=False
            )
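

# Hypothetical usage sketch, not part of the upstream file: it shows one way the
# shield could be exercised directly, assuming CodeShield is installed and that
# TextShield subclasses take no constructor arguments. Because of the relative
# import above, run it as a module (python -m <package>.<module>) rather than as
# a standalone script so the import resolves.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        shield = CodeScannerShield()
        # Scan a snippet containing a known-risky pattern (unsafe deserialization).
        response = await shield.run_impl("import pickle; obj = pickle.loads(user_bytes)")
        print(response)

    asyncio.run(_demo())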