Use inference APIs for executing Llama Guard

This commit is contained in:
Ashwin Bharambe 2024-09-25 19:40:49 -07:00
parent 6236634d84
commit 37ca22cda6
3 changed files with 94 additions and 164 deletions

View file

@@ -21,10 +21,9 @@ def available_providers() -> List[ProviderSpec]:
api=Api.safety,
provider_id="meta-reference",
pip_packages=[
"accelerate",
"codeshield",
"torch",
"transformers",
"torch --index-url https://download.pytorch.org/whl/cpu",
],
module="llama_stack.providers.impls.meta_reference.safety",
config_class="llama_stack.providers.impls.meta_reference.safety.SafetyConfig",