Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 15:23:51 +00:00)
kill with_safety(), not needed

commit cd4880126b
parent 59f1fe5af8

2 changed files with 2 additions and 16 deletions
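In effect, call sites that previously went through the helper now construct the wrapper directly; a minimal before/after sketch, with the argument names taken from the hunks below:

# before: indirection through the now-removed helper
safe_tool = with_safety(tool, safety_api, input_shields, output_shields)

# after: direct construction, which is all the helper did
safe_tool = SafeTool(tool, safety_api, input_shields, output_shields)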
@@ -38,7 +38,7 @@ from .tools.builtin import (
     SearchTool,
     WolframAlphaTool,
 )
-from .tools.safety import with_safety
+from .tools.safety import SafeTool


 def make_random_string(length: int = 8):
@@ -87,7 +87,7 @@ class ChatAgent(ShieldRunnerMixin):
                 continue

             builtin_tools.append(
-                with_safety(
+                SafeTool(
                     tool,
                     safety_api,
                     tool_defn.input_shields,
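The hunk ends at tool_defn.input_shields; going by the signature of the helper deleted below, the call presumably continues with the output shields, roughly as follows (the trailing argument and closing parentheses fall outside the hunk and are an assumption):

            builtin_tools.append(
                SafeTool(
                    tool,
                    safety_api,
                    tool_defn.input_shields,
                    tool_defn.output_shields,  # assumed; not shown in the hunk
                )
            )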
@@ -41,17 +41,3 @@ class SafeTool(BaseTool, ShieldRunnerMixin):
             await self.run_multiple_shields(messages, self.output_shields)

         return res
-
-
-def with_safety(
-    tool: BaseTool,
-    safety_api: Safety,
-    input_shields: List[str] = None,
-    output_shields: List[str] = None,
-) -> SafeTool:
-    return SafeTool(
-        tool,
-        safety_api,
-        input_shields=input_shields,
-        output_shields=output_shields,
-    )
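After this change only the direct construction path remains in .tools.safety. Below is a minimal, self-contained sketch of the pattern, assuming stand-in stubs for BaseTool, Safety, and ShieldRunnerMixin (they are not the upstream classes); SafeTool's constructor arguments and the shield calls mirror the lines visible in the hunks above, everything else is illustrative.

# Minimal sketch of a shield-wrapping tool; BaseTool, Safety, and
# ShieldRunnerMixin are stubbed here so the example runs on its own.
import asyncio
from typing import Any, List, Optional


class BaseTool:
    async def run(self, messages: List[Any]) -> Any:
        raise NotImplementedError  # overridden by concrete tools


class Safety:
    async def run_shield(self, shield: str, messages: List[Any]) -> None:
        # Placeholder: a real safety API would validate messages against a shield.
        print(f"shield {shield} checked {len(messages)} message(s)")


class ShieldRunnerMixin:
    def __init__(self, safety_api: Safety, input_shields=None, output_shields=None):
        self.safety_api = safety_api
        self.input_shields = input_shields or []
        self.output_shields = output_shields or []

    async def run_multiple_shields(self, messages: List[Any], shields: List[str]) -> None:
        for shield in shields:
            await self.safety_api.run_shield(shield, messages)


class SafeTool(BaseTool, ShieldRunnerMixin):
    """Runs input/output shields around the wrapped tool's execution."""

    def __init__(
        self,
        tool: BaseTool,
        safety_api: Safety,
        input_shields: Optional[List[str]] = None,
        output_shields: Optional[List[str]] = None,
    ):
        self._tool = tool
        ShieldRunnerMixin.__init__(
            self, safety_api, input_shields=input_shields, output_shields=output_shields
        )

    async def run(self, messages: List[Any]) -> Any:
        if self.input_shields:
            await self.run_multiple_shields(messages, self.input_shields)
        res = await self._tool.run(messages)
        if self.output_shields:
            await self.run_multiple_shields(messages, self.output_shields)
        return res


class EchoTool(BaseTool):
    async def run(self, messages: List[Any]) -> List[Any]:
        return messages


# Callers now build the wrapper directly, which is all with_safety() did:
safe = SafeTool(EchoTool(), Safety(), input_shields=["example_shield"], output_shields=[])
asyncio.run(safe.run(["hello"]))

Design-wise, the removed helper only forwarded its four arguments to SafeTool, so constructing the wrapper inline drops one layer of indirection without changing behavior.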