mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-27 18:50:41 +00:00
fix: Add 'accelerate' dependency to 'prompt-guard' (#1724)
Required to start up a distribution with prompt guard. Closes: #1723. ## Test Plan: distribution starts with patch applied. Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent
dce9a24a6c
commit
00917ef5b2
1 changed files with 1 additions and 1 deletions
|
@@ -21,7 +21,7 @@ def available_providers() -> List[ProviderSpec]:
         api=Api.safety,
         provider_type="inline::prompt-guard",
         pip_packages=[
-            "transformers",
+            "transformers[accelerate]",
             "torch --index-url https://download.pytorch.org/whl/cpu",
         ],
         module="llama_stack.providers.inline.safety.prompt_guard",
Loading…
Add table
Add a link
Reference in a new issue