diff --git a/llama_stack/cli/model/prompt_format.py b/llama_stack/cli/model/prompt_format.py
index 67f456175..5fdfb51a6 100644
--- a/llama_stack/cli/model/prompt_format.py
+++ b/llama_stack/cli/model/prompt_format.py
@@ -43,7 +43,7 @@ class ModelPromptFormat(Subcommand):
         )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources
 
         # Only Llama 3.1 and 3.2 are supported
         supported_model_ids = [
@@ -64,25 +64,26 @@ class ModelPromptFormat(Subcommand):
                 f"{model_id} is not a valid Model. Choose one from --\n {model_str}"
             )
 
-        llama_3_1_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_1/prompt_format.md"
+        llama_3_1_file = (
+            importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
         )
-        llama_3_2_text_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/text_prompt_format.md"
+        llama_3_2_text_file = (
+            importlib.resources.files("llama_models") / "llama3_2/text_prompt_format.md"
         )
-        llama_3_2_vision_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/vision_prompt_format.md"
+        llama_3_2_vision_file = (
+            importlib.resources.files("llama_models")
+            / "llama3_2/vision_prompt_format.md"
         )
 
         if model_family(model_id) == ModelFamily.llama3_1:
-            with open(llama_3_1_file, "r") as f:
-                content = f.read()
+            with importlib.resources.as_file(llama_3_1_file) as f:
+                content = f.open("r").read()
         elif model_family(model_id) == ModelFamily.llama3_2:
             if is_multimodal(model_id):
-                with open(llama_3_2_vision_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_vision_file) as f:
+                    content = f.open("r").read()
             else:
-                with open(llama_3_2_text_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_text_file) as f:
+                    content = f.open("r").read()
 
         render_markdown_to_pager(content)
diff --git a/llama_stack/cli/stack/run.py b/llama_stack/cli/stack/run.py
index fb4e76d7a..3dee9eca1 100644
--- a/llama_stack/cli/stack/run.py
+++ b/llama_stack/cli/stack/run.py
@@ -51,7 +51,7 @@ class StackRun(Subcommand):
         )
 
     def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources
 
         import yaml
         from llama_stack.distribution.build import ImageType
@@ -106,15 +106,15 @@ class StackRun(Subcommand):
         config = parse_and_maybe_upgrade_config(config_dict)
 
         if config.docker_image:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_container.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_container.sh"
             )
             run_args = [script, config.docker_image]
         else:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_conda_env.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_conda_env.sh"
             )
             run_args = [
                 script,
diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index f376301f9..e1fa08c58 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -10,7 +10,7 @@ from enum import Enum
 from pathlib import Path
 from typing import Dict, List
 
-import pkg_resources
+import importlib.resources
 from pydantic import BaseModel
 from termcolor import cprint
 
@@ -111,8 +111,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
     normal_deps += SERVER_DEPENDENCIES
 
     if build_config.image_type == ImageType.docker.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_container.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_container.sh"
         )
         args = [
             script,
@@ -123,8 +123,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.conda.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_conda_env.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
        )
         args = [
             script,
@@ -133,9 +133,7 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.venv.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_venv.sh"
-        )
+        script = importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
         args = [
             script,
             build_config.name,
diff --git a/llama_stack/distribution/stack.py b/llama_stack/distribution/stack.py
index 965df5f03..e5d6f4c03 100644
--- a/llama_stack/distribution/stack.py
+++ b/llama_stack/distribution/stack.py
@@ -10,7 +10,7 @@ import re
 from pathlib import Path
 from typing import Any, Dict, Optional
 
-import pkg_resources
+import importlib.resources
 import yaml
 
 from termcolor import colored
@@ -190,14 +190,13 @@ async def construct_stack(
 
 
 def get_stack_run_config_from_template(template: str) -> StackRunConfig:
-    template_path = pkg_resources.resource_filename(
-        "llama_stack", f"templates/{template}/run.yaml"
+    template_path = (
+        importlib.resources.files("llama_stack") / f"templates/{template}/run.yaml"
     )
 
-    if not Path(template_path).exists():
-        raise ValueError(f"Template '{template}' not found at {template_path}")
-
-    with open(template_path) as f:
-        run_config = yaml.safe_load(f)
+    with importlib.resources.as_file(template_path) as path:
+        if not path.exists():
+            raise ValueError(f"Template '{template}' not found at {template_path}")
+        run_config = yaml.safe_load(path.open())
 
     return StackRunConfig(**replace_env_vars(run_config))
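
For context, here is a minimal, self-contained sketch of the `pkg_resources` → `importlib.resources` pattern this diff applies. It is not part of the patch: the helper name `read_packaged_file` is hypothetical, and it assumes Python 3.9+, where both `importlib.resources.files()` and `importlib.resources.as_file()` are available.

```python
# Minimal sketch (not part of the patch) of the migration pattern above.
# Assumes Python 3.9+; read_packaged_file is a hypothetical helper name.
import importlib.resources


def read_packaged_file(package: str, resource: str) -> str:
    # files() returns a Traversable anchored at the package, not necessarily
    # a real filesystem path (the package may ship inside a zip/wheel).
    ref = importlib.resources.files(package) / resource

    # as_file() materializes the resource on disk for the duration of the
    # context manager, yielding a pathlib.Path usable by ordinary file APIs
    # (or passable to a subprocess, as with the shell scripts in this diff).
    with importlib.resources.as_file(ref) as path:
        if not path.exists():
            raise FileNotFoundError(f"{resource!r} not found in {package!r}")
        # read_text() opens and closes the file handle in a single call.
        return path.read_text()


if __name__ == "__main__":
    # Hypothetical usage mirroring one of the calls in this diff:
    print(read_packaged_file("llama_stack", "distribution/build_venv.sh")[:120])
```

One subtlety the new code depends on: because `as_file()` may extract the resource to a temporary location, the yielded path is only guaranteed to exist inside the `with` block.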