From b007b062f370df3eaf33c90e12bd258678175b78 Mon Sep 17 00:00:00 2001
From: Dalton Flanagan <6599399+dltn@users.noreply.github.com>
Date: Fri, 22 Nov 2024 16:23:44 -0500
Subject: [PATCH] Fix `llama stack build` in 0.0.54 (#505)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# What does this PR do?

Safety provider `inline::meta-reference` is now deprecated. However, we

* aren't checking / printing the deprecation message in `llama stack build`
* make the deprecated (unusable) provider the default

So I (1) added checking and (2) made `inline::llama-guard` the default.
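For quick review, here is a self-contained sketch of the behavior the new check is meant to give: look up the provider spec in the registry and fail fast with `InvalidProviderError` when `deprecation_error` is set, instead of hitting a `ModuleNotFoundError` later. The `ProviderSpecStub` dataclass, `check_provider` helper, and toy registry are hypothetical stand-ins for illustration only; the real types live in `llama_stack.distribution` (see the diff below).

```python
# Hypothetical, minimal sketch of the deprecation check added to `llama stack build`.
# The stub types below are illustrative stand-ins, not the real llama_stack classes.
from dataclasses import dataclass
from typing import Dict, Optional


class InvalidProviderError(Exception):
    """Raised when a selected provider can no longer be used."""


@dataclass
class ProviderSpecStub:
    provider_type: str
    deprecation_error: Optional[str] = None


def check_provider(
    registry: Dict[str, Dict[str, ProviderSpecStub]], api: str, provider_type: str
) -> None:
    # Fail fast with a readable message if the provider is deprecated.
    p = registry[api][provider_type]
    if p.deprecation_error:
        raise InvalidProviderError(p.deprecation_error)


if __name__ == "__main__":
    registry = {
        "safety": {
            "inline::llama-guard": ProviderSpecStub("inline::llama-guard"),
            "inline::meta-reference": ProviderSpecStub(
                "inline::meta-reference",
                deprecation_error=(
                    "Provider `inline::meta-reference` for API `safety` does not "
                    "work with the latest Llama Stack."
                ),
            ),
        }
    }
    check_provider(registry, "safety", "inline::llama-guard")  # passes silently
    try:
        check_provider(registry, "safety", "inline::meta-reference")
    except InvalidProviderError as err:
        print(f"build would abort with: {err}")
```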
## Test Plan

Before
```
Traceback (most recent call last):
  File "/home/dalton/.conda/envs/nov22/bin/llama", line 8, in <module>
    sys.exit(main())
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 46, in main
    parser.run(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 40, in run
    args.func(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 177, in _run_stack_build_command
    self._run_stack_build_command_from_build_config(build_config)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 305, in _run_stack_build_command_from_build_config
    self._generate_run_config(build_config, build_dir)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 226, in _generate_run_config
    config_type = instantiate_class_type(
  File "/home/dalton/all/llama-stack/llama_stack/distribution/utils/dynamic.py", line 12, in instantiate_class_type
    module = importlib.import_module(module_name)
  File "/home/dalton/.conda/envs/nov22/lib/python3.10/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1004, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'llama_stack.providers.inline.safety.meta_reference'
```

After
```
Traceback (most recent call last):
  File "/home/dalton/.conda/envs/nov22/bin/llama", line 8, in <module>
    sys.exit(main())
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 46, in main
    parser.run(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/llama.py", line 40, in run
    args.func(args)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 177, in _run_stack_build_command
    self._run_stack_build_command_from_build_config(build_config)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 309, in _run_stack_build_command_from_build_config
    self._generate_run_config(build_config, build_dir)
  File "/home/dalton/all/llama-stack/llama_stack/cli/stack/build.py", line 228, in _generate_run_config
    raise InvalidProviderError(p.deprecation_error)
llama_stack.distribution.resolver.InvalidProviderError: Provider `inline::meta-reference` for API `safety` does not work with the latest Llama Stack.
- if you are using Llama Guard v3, please use the `inline::llama-guard` provider instead.
- if you are using Prompt Guard, please use the `inline::prompt-guard` provider instead.
- if you are using Code Scanner, please use the `inline::code-scanner` provider instead.
```

[Screenshot 2024-11-22 at 4:10:24 PM]

## Sources

Please link relevant resources if necessary.

## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case).
- [x] Ran pre-commit to handle lint / formatting issues.
- [ ] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
---
 llama_stack/cli/stack/build.py           |  6 +++++-
 llama_stack/providers/registry/safety.py | 20 ++++++++++----------
 2 files changed, 15 insertions(+), 11 deletions(-)

diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index ce1ed2747..01b7dae66 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -16,9 +16,9 @@ from pathlib import Path
 import pkg_resources
 
 from llama_stack.distribution.distribution import get_provider_registry
+from llama_stack.distribution.resolver import InvalidProviderError
 from llama_stack.distribution.utils.dynamic import instantiate_class_type
 
-
 TEMPLATES_PATH = Path(os.path.relpath(__file__)).parent.parent.parent / "templates"
 
 
@@ -223,6 +223,10 @@ class StackBuild(Subcommand):
             for i, provider_type in enumerate(provider_types):
                 pid = provider_type.split("::")[-1]
 
+                p = provider_registry[Api(api)][provider_type]
+                if p.deprecation_error:
+                    raise InvalidProviderError(p.deprecation_error)
+
                 config_type = instantiate_class_type(
                     provider_registry[Api(api)][provider_type].config_class
                 )
diff --git a/llama_stack/providers/registry/safety.py b/llama_stack/providers/registry/safety.py
index 77dd823eb..99b0d2bd8 100644
--- a/llama_stack/providers/registry/safety.py
+++ b/llama_stack/providers/registry/safety.py
@@ -17,6 +17,16 @@ from llama_stack.distribution.datatypes import (
 
 def available_providers() -> List[ProviderSpec]:
     return [
+        InlineProviderSpec(
+            api=Api.safety,
+            provider_type="inline::prompt-guard",
+            pip_packages=[
+                "transformers",
+                "torch --index-url https://download.pytorch.org/whl/cpu",
+            ],
+            module="llama_stack.providers.inline.safety.prompt_guard",
+            config_class="llama_stack.providers.inline.safety.prompt_guard.PromptGuardConfig",
+        ),
         InlineProviderSpec(
             api=Api.safety,
             provider_type="inline::meta-reference",
@@ -48,16 +58,6 @@ Provider `inline::meta-reference` for API `safety` does not work with the latest
                 Api.inference,
             ],
         ),
-        InlineProviderSpec(
-            api=Api.safety,
-            provider_type="inline::prompt-guard",
-            pip_packages=[
-                "transformers",
-                "torch --index-url https://download.pytorch.org/whl/cpu",
-            ],
-            module="llama_stack.providers.inline.safety.prompt_guard",
-            config_class="llama_stack.providers.inline.safety.prompt_guard.PromptGuardConfig",
-        ),
         InlineProviderSpec(
             api=Api.safety,
             provider_type="inline::code-scanner",