From 9ec54dcbe73b52bb2d0613bc5864b6422e284165 Mon Sep 17 00:00:00 2001
From: Yuan Tang
Date: Mon, 13 Jan 2025 23:20:02 -0500
Subject: [PATCH] Switch to use importlib instead of deprecated pkg_resources
 (#678)

`pkg_resources` has been deprecated. This PR switches to using
`importlib.resources`.

---------

Signed-off-by: Yuan Tang
---
 llama_stack/cli/model/prompt_format.py | 27 ++++++++++++++-------------
 llama_stack/cli/stack/build.py         | 16 ++++++++--------
 llama_stack/cli/stack/run.py           | 15 ++++++++-------
 llama_stack/distribution/build.py      | 14 ++++++--------
 llama_stack/distribution/stack.py      | 16 +++++++---------
 5 files changed, 43 insertions(+), 45 deletions(-)

diff --git a/llama_stack/cli/model/prompt_format.py b/llama_stack/cli/model/prompt_format.py
index 67f456175..5fdfb51a6 100644
--- a/llama_stack/cli/model/prompt_format.py
+++ b/llama_stack/cli/model/prompt_format.py
@@ -43,7 +43,7 @@ class ModelPromptFormat(Subcommand):
         )

     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources

         # Only Llama 3.1 and 3.2 are supported
         supported_model_ids = [
@@ -64,25 +64,26 @@ class ModelPromptFormat(Subcommand):
                 f"{model_id} is not a valid Model. Choose one from --\n {model_str}"
             )

-        llama_3_1_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_1/prompt_format.md"
+        llama_3_1_file = (
+            importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
         )
-        llama_3_2_text_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/text_prompt_format.md"
+        llama_3_2_text_file = (
+            importlib.resources.files("llama_models") / "llama3_2/text_prompt_format.md"
         )
-        llama_3_2_vision_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/vision_prompt_format.md"
+        llama_3_2_vision_file = (
+            importlib.resources.files("llama_models")
+            / "llama3_2/vision_prompt_format.md"
         )

         if model_family(model_id) == ModelFamily.llama3_1:
-            with open(llama_3_1_file, "r") as f:
-                content = f.read()
+            with importlib.resources.as_file(llama_3_1_file) as f:
+                content = f.open("r").read()
         elif model_family(model_id) == ModelFamily.llama3_2:
             if is_multimodal(model_id):
-                with open(llama_3_2_vision_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_vision_file) as f:
+                    content = f.open("r").read()
             else:
-                with open(llama_3_2_text_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_text_file) as f:
+                    content = f.open("r").read()

         render_markdown_to_pager(content)
diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index 54d78ad93..084374c8a 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -4,14 +4,15 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

 import argparse
+
+import importlib.resources
+
 import os
 import shutil
 from functools import lru_cache
 from pathlib import Path
 from typing import List, Optional

-import pkg_resources
-
 from llama_stack.cli.subcommand import Subcommand
 from llama_stack.distribution.datatypes import (
@@ -290,13 +291,12 @@ class StackBuild(Subcommand):

         if template_name:
             # copy run.yaml from template to build_dir instead of generating it again
-            template_path = pkg_resources.resource_filename(
-                "llama_stack", f"templates/{template_name}/run.yaml"
+            template_path = (
+                importlib.resources.files("llama_stack")
+                / f"templates/{template_name}/run.yaml"
             )
-            os.makedirs(build_dir, exist_ok=True)
-            run_config_file = build_dir / f"{build_config.name}-run.yaml"
-            shutil.copy(template_path, run_config_file)
-
+            with importlib.resources.as_file(template_path) as path:
+                shutil.copy(path, run_config_file)
             # Find all ${env.VARIABLE} patterns
             cprint("Build Successful!", color="green")
         else:
diff --git a/llama_stack/cli/stack/run.py b/llama_stack/cli/stack/run.py
index 1e4e6d7a1..90b2ecf6d 100644
--- a/llama_stack/cli/stack/run.py
+++ b/llama_stack/cli/stack/run.py
@@ -52,7 +52,8 @@ class StackRun(Subcommand):
         )

     def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources
+
         import yaml

         from llama_stack.distribution.build import ImageType
@@ -107,15 +108,15 @@
         config = parse_and_maybe_upgrade_config(config_dict)

         if config.docker_image:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_container.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_container.sh"
             )
             run_args = [script, config.docker_image]
         else:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_conda_env.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_conda_env.sh"
             )
             run_args = [
                 script,
diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index f376301f9..5a7dfba11 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -4,13 +4,13 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

+import importlib.resources
 import logging

 from enum import Enum
 from pathlib import Path
 from typing import Dict, List

-import pkg_resources
 from pydantic import BaseModel
 from termcolor import cprint

@@ -111,8 +111,8 @@
     normal_deps += SERVER_DEPENDENCIES

     if build_config.image_type == ImageType.docker.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_container.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_container.sh"
         )
         args = [
             script,
@@ -123,8 +123,8 @@
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.conda.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_conda_env.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
         )
         args = [
             script,
@@ -133,9 +133,7 @@
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.venv.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_venv.sh"
-        )
+        script = importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
         args = [
             script,
             build_config.name,
diff --git a/llama_stack/distribution/stack.py b/llama_stack/distribution/stack.py
index c85e4c7de..acbd42fa9 100644
--- a/llama_stack/distribution/stack.py
+++ b/llama_stack/distribution/stack.py
@@ -4,13 +4,12 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

+import importlib.resources
 import logging
 import os
 import re
-from pathlib import Path
 from typing import Any, Dict, Optional

-import pkg_resources
 import yaml
 from termcolor import colored

@@ -211,14 +210,13 @@ async def construct_stack(


 def get_stack_run_config_from_template(template: str) -> StackRunConfig:
-    template_path = pkg_resources.resource_filename(
-        "llama_stack", f"templates/{template}/run.yaml"
+    template_path = (
+        importlib.resources.files("llama_stack") / f"templates/{template}/run.yaml"
     )
-    if not Path(template_path).exists():
-        raise ValueError(f"Template '{template}' not found at {template_path}")
-
-    with open(template_path) as f:
-        run_config = yaml.safe_load(f)
+    with importlib.resources.as_file(template_path) as path:
+        if not path.exists():
+            raise ValueError(f"Template '{template}' not found at {template_path}")
+        run_config = yaml.safe_load(path.open())

     return StackRunConfig(**replace_env_vars(run_config))
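
The sketch below illustrates the access pattern this patch standardizes on; it is
not part of the diff. Only the `importlib.resources.files()` / `as_file()` calls and
the `"llama_models"` prompt-format path are taken from the changes above; the
function name and return value are illustrative, and it assumes Python 3.9+, where
both APIs live in the standard library.

    import importlib.resources


    def load_prompt_format_doc() -> str:
        # Deprecated pattern removed by this patch (pkg_resources ships with
        # setuptools and resolves the resource eagerly to a filesystem path):
        #   path = pkg_resources.resource_filename("llama_models", "llama3_1/prompt_format.md")

        # Replacement: files() returns a Traversable rooted at the installed
        # package, and "/" joins a relative resource path without assuming the
        # package is unpacked on disk.
        doc = importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"

        # as_file() guarantees a real filesystem path for the duration of the
        # with-block (extracting to a temporary file if the package is zipped),
        # which is why the patch wraps its open()/shutil.copy() calls in it.
        with importlib.resources.as_file(doc) as path:
            return path.read_text()

The practical difference is lifetime: `pkg_resources.resource_filename()` returned a
plain string path that remained usable after the call, whereas the path yielded by
`as_file()` is only guaranteed to exist inside the `with` block, so reads and copies
of the resource have to happen before the block exits, which is why the copy/read
call sites above are wrapped in it.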