Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-30 15:54:17 +00:00)

Merge branch 'main' into add-watsonx-inference-adapter

Commit 363e2565f5: 162 changed files with 3845 additions and 3126 deletions
@@ -21,7 +21,7 @@ def available_providers() -> List[ProviderSpec]:
             api=Api.safety,
             provider_type="inline::prompt-guard",
             pip_packages=[
-                "transformers",
+                "transformers[accelerate]",
                 "torch --index-url https://download.pytorch.org/whl/cpu",
             ],
             module="llama_stack.providers.inline.safety.prompt_guard",
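
For context, here is a minimal sketch of how this entry sits inside the registry function named in the hunk header. Only api, provider_type, pip_packages, and module come from the diff; the InlineProviderSpec wrapper, the import paths, and the omitted fields are assumptions about the surrounding file, not part of this commit.

# Sketch only: field values are taken from the hunk above; the wrapper class and
# import paths are assumptions about how the safety provider registry is laid out.
from typing import List

from llama_stack.providers.datatypes import Api, InlineProviderSpec, ProviderSpec  # assumed import path


def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.safety,
            provider_type="inline::prompt-guard",
            pip_packages=[
                # "transformers[accelerate]" also pulls in the accelerate package,
                # which transformers uses for device placement and low-memory loading;
                # the torch entry points pip at the CPU-only wheel index.
                "transformers[accelerate]",
                "torch --index-url https://download.pytorch.org/whl/cpu",
            ],
            module="llama_stack.providers.inline.safety.prompt_guard",
            # config_class and any other fields of the real entry are omitted here.
        ),
    ]

Since these pip_packages strings are what gets installed for the provider's dependencies, the practical effect of the change is that accelerate is installed alongside transformers for the prompt-guard safety provider.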