# What does this PR do?

Safety provider `inline::meta-reference` is now deprecated. However, we

* aren't checking / printing the deprecation message in `llama stack build`
* make the deprecated (unusable) provider the default

So I (1) added checking and (2) made `inline::llama-guard` the default (a minimal sketch of the check is included after the checklist below).

## Test Plan

Before:

```
Traceback (most recent call last):
  File "/home/dalton/.conda/envs/nov22/bin/llama", line 8, in <module>
    sys.exit(main())
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 46, in main
    parser.run(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 40, in run
    args.func(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 177, in _run_stack_build_command
    self._run_stack_build_command_from_build_config(build_config)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 305, in _run_stack_build_command_from_build_config
    self._generate_run_config(build_config, build_dir)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 226, in _generate_run_config
    config_type = instantiate_class_type(
  File "/home/dalton/all/llama-stack/llama_stack/distribution/utils/dynamic.py", line 12, in instantiate_class_type
    module = importlib.import_module(module_name)
  File "/home/dalton/.conda/envs/nov22/lib/python3.10/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1004, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'llama_stack.providers.inline.safety.meta_reference'
```

After:

```
Traceback (most recent call last):
  File "/home/dalton/.conda/envs/nov22/bin/llama", line 8, in <module>
    sys.exit(main())
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 46, in main
    parser.run(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 40, in run
    args.func(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 177, in _run_stack_build_command
    self._run_stack_build_command_from_build_config(build_config)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 309, in _run_stack_build_command_from_build_config
    self._generate_run_config(build_config, build_dir)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 228, in _generate_run_config
    raise InvalidProviderError(p.deprecation_error)
llama_stack.distribution.resolver.InvalidProviderError: Provider `inline::meta-reference` for API `safety` does not work with the latest Llama Stack.

- if you are using Llama Guard v3, please use the `inline::llama-guard` provider instead.
- if you are using Prompt Guard, please use the `inline::prompt-guard` provider instead.
- if you are using Code Scanner, please use the `inline::code-scanner` provider instead.
```

<img width="469" alt="Screenshot 2024-11-22 at 4 10 24 PM" src="https://github.com/user-attachments/assets/8c2e09fe-379a-4504-b246-7925f80a6ed6">

## Sources

Please link relevant resources if necessary.

## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case).
- [x] Ran pre-commit to handle lint / formatting issues.
- [ ] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
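
For reference, here is a minimal sketch of the kind of check this PR adds, assuming a mapping from provider type to its resolved spec from the provider registry. The helper name `fail_if_deprecated` and the mapping shape are illustrative, not the exact diff; `ProviderSpec`, `InvalidProviderError`, and the `deprecation_error` field are real names from llama-stack (see the registry file and the "After" traceback above).

```python
# Illustrative sketch only -- not the exact code from this PR.
from typing import Dict

from llama_stack.distribution.datatypes import ProviderSpec
from llama_stack.distribution.resolver import InvalidProviderError


def fail_if_deprecated(providers: Dict[str, ProviderSpec]) -> None:
    """Abort the build if any selected provider carries a deprecation_error.

    `providers` maps provider type (e.g. "inline::meta-reference") to its
    resolved spec from the provider registry.
    """
    for provider_type, spec in providers.items():
        if spec.deprecation_error:
            # Surface the registry's own message, as in the "After"
            # traceback shown in the Test Plan.
            raise InvalidProviderError(spec.deprecation_error)
```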
Safety provider registry (Python, 88 lines, 3.2 KiB):
```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import List

from llama_stack.distribution.datatypes import (
    AdapterSpec,
    Api,
    InlineProviderSpec,
    ProviderSpec,
    remote_provider_spec,
)


def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.safety,
            provider_type="inline::prompt-guard",
            pip_packages=[
                "transformers",
                "torch --index-url https://download.pytorch.org/whl/cpu",
            ],
            module="llama_stack.providers.inline.safety.prompt_guard",
            config_class="llama_stack.providers.inline.safety.prompt_guard.PromptGuardConfig",
        ),
        InlineProviderSpec(
            api=Api.safety,
            provider_type="inline::meta-reference",
            pip_packages=[
                "transformers",
                "torch --index-url https://download.pytorch.org/whl/cpu",
            ],
            module="llama_stack.providers.inline.safety.meta_reference",
            config_class="llama_stack.providers.inline.safety.meta_reference.SafetyConfig",
            api_dependencies=[
                Api.inference,
            ],
            deprecation_error="""
Provider `inline::meta-reference` for API `safety` does not work with the latest Llama Stack.

- if you are using Llama Guard v3, please use the `inline::llama-guard` provider instead.
- if you are using Prompt Guard, please use the `inline::prompt-guard` provider instead.
- if you are using Code Scanner, please use the `inline::code-scanner` provider instead.

""",
        ),
        InlineProviderSpec(
            api=Api.safety,
            provider_type="inline::llama-guard",
            pip_packages=[],
            module="llama_stack.providers.inline.safety.llama_guard",
            config_class="llama_stack.providers.inline.safety.llama_guard.LlamaGuardConfig",
            api_dependencies=[
                Api.inference,
            ],
        ),
        InlineProviderSpec(
            api=Api.safety,
            provider_type="inline::code-scanner",
            pip_packages=[
                "codeshield",
            ],
            module="llama_stack.providers.inline.safety.code_scanner",
            config_class="llama_stack.providers.inline.safety.code_scanner.CodeScannerConfig",
        ),
        remote_provider_spec(
            api=Api.safety,
            adapter=AdapterSpec(
                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.remote.safety.sample",
                config_class="llama_stack.providers.remote.safety.sample.SampleConfig",
            ),
        ),
        remote_provider_spec(
            api=Api.safety,
            adapter=AdapterSpec(
                adapter_type="bedrock",
                pip_packages=["boto3"],
                module="llama_stack.providers.remote.safety.bedrock",
                config_class="llama_stack.providers.remote.safety.bedrock.BedrockSafetyConfig",
            ),
        ),
    ]
```
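
As a quick illustration, the deprecation message can be looked up directly from the specs returned by `available_providers()`. The import path below is an assumption inferred from the module strings in the registry above, and `getattr` is used because only some specs set `deprecation_error`:

```python
# Illustrative usage sketch; the import path is inferred from the module
# strings in the registry file above.
from llama_stack.providers.registry.safety import available_providers

# Collect deprecation messages for any safety providers that carry one.
deprecation_errors = {
    spec.provider_type: spec.deprecation_error
    for spec in available_providers()
    if getattr(spec, "deprecation_error", None)
}

# Prints the multi-line message embedded in the registry above.
print(deprecation_errors["inline::meta-reference"])
```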