Mirror of https://github.com/meta-llama/llama-stack.git
commit ef214cbfc0 (parent 49497d2d96)

More deprecations

Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>

4 changed files with 34 additions and 36 deletions
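All four files get the same treatment: calls to the deprecated setuptools API pkg_resources.resource_filename are replaced with the standard-library importlib.resources (available in the form used here since Python 3.9). A minimal sketch of the migration pattern, with an illustrative package and resource name rather than one taken from the diff:

import importlib.resources

# Before (deprecated, setuptools):
#   import pkg_resources
#   path = pkg_resources.resource_filename("some_package", "data/config.yaml")

# After: files() returns a Traversable rooted at the installed package.
resource = importlib.resources.files("some_package") / "data/config.yaml"

# as_file() yields a real filesystem path, extracting a temporary copy
# if the package is installed inside a zip archive.
with importlib.resources.as_file(resource) as path:
    text = path.read_text()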
@@ -43,7 +43,7 @@ class ModelPromptFormat(Subcommand):
         )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources
 
         # Only Llama 3.1 and 3.2 are supported
         supported_model_ids = [
@@ -64,25 +64,26 @@ class ModelPromptFormat(Subcommand):
                 f"{model_id} is not a valid Model. Choose one from --\n {model_str}"
             )
 
-        llama_3_1_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_1/prompt_format.md"
+        llama_3_1_file = (
+            importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
         )
-        llama_3_2_text_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/text_prompt_format.md"
+        llama_3_2_text_file = (
+            importlib.resources.files("llama_models") / "llama3_2/text_prompt_format.md"
         )
-        llama_3_2_vision_file = pkg_resources.resource_filename(
-            "llama_models", "llama3_2/vision_prompt_format.md"
+        llama_3_2_vision_file = (
+            importlib.resources.files("llama_models")
+            / "llama3_2/vision_prompt_format.md"
         )
         if model_family(model_id) == ModelFamily.llama3_1:
-            with open(llama_3_1_file, "r") as f:
-                content = f.read()
+            with importlib.resources.as_file(llama_3_1_file) as f:
+                content = f.open("r").read()
         elif model_family(model_id) == ModelFamily.llama3_2:
             if is_multimodal(model_id):
-                with open(llama_3_2_vision_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_vision_file) as f:
+                    content = f.open("r").read()
             else:
-                with open(llama_3_2_text_file, "r") as f:
-                    content = f.read()
+                with importlib.resources.as_file(llama_3_2_text_file) as f:
+                    content = f.open("r").read()
 
         render_markdown_to_pager(content)
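The read idiom in the new code: because files() may resolve to a resource that is not a real file on disk, as_file() is used to materialize it before opening. An equivalent sketch of reading one of the prompt-format documents above, assuming llama_models is installed; Path.read_text() is a tighter spelling than the diff's f.open("r").read():

import importlib.resources

resource = importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
with importlib.resources.as_file(resource) as path:
    content = path.read_text()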
@@ -51,7 +51,7 @@ class StackRun(Subcommand):
         )
 
     def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
-        import pkg_resources
+        import importlib.resources
         import yaml
 
         from llama_stack.distribution.build import ImageType
@@ -106,15 +106,15 @@ class StackRun(Subcommand):
         config = parse_and_maybe_upgrade_config(config_dict)
 
         if config.docker_image:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_container.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_container.sh"
             )
             run_args = [script, config.docker_image]
         else:
-            script = pkg_resources.resource_filename(
-                "llama_stack",
-                "distribution/start_conda_env.sh",
+            script = (
+                importlib.resources.files("llama_stack")
+                / "distribution/start_conda_env.sh"
             )
             run_args = [
                 script,
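Here the resolved script lands in run_args for a process launcher. When llama_stack is installed as a regular directory, files() returns a pathlib.Path, which converts cleanly to a command-line argument. A hedged sketch of that launch step; the subprocess call and the "my-env" argument are illustrative, since the diff itself only assembles run_args:

import importlib.resources
import subprocess

script = importlib.resources.files("llama_stack") / "distribution/start_conda_env.sh"
# str() is fine for a package installed flat on disk; a zip install would
# need importlib.resources.as_file() to extract the script first.
subprocess.run([str(script), "my-env"], check=True)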
@@ -10,7 +10,7 @@ from enum import Enum
 from pathlib import Path
 from typing import Dict, List
 
-import pkg_resources
+import importlib.resources
 from pydantic import BaseModel
 from termcolor import cprint
 
@@ -111,8 +111,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
     normal_deps += SERVER_DEPENDENCIES
 
     if build_config.image_type == ImageType.docker.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_container.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_container.sh"
         )
         args = [
             script,
@@ -123,8 +123,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.conda.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_conda_env.sh"
+        script = (
+            importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
         )
         args = [
             script,
@@ -133,9 +133,7 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             " ".join(normal_deps),
         ]
     elif build_config.image_type == ImageType.venv.value:
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/build_venv.sh"
-        )
+        script = importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
         args = [
             script,
             build_config.name,
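All three branches of build_image follow one shape: resolve a packaged shell script, then place it at the head of the argument list. A sketch of that shared pattern; the script paths mirror the diff, but the dict-based dispatch is illustrative and not how build.py is actually structured:

import importlib.resources

# Hypothetical lookup table over the three scripts this diff touches.
_BUILD_SCRIPTS = {
    "docker": "distribution/build_container.sh",
    "conda": "distribution/build_conda_env.sh",
    "venv": "distribution/build_venv.sh",
}

def resolve_build_script(image_type: str):
    # Returns a Traversable pointing at the packaged script.
    return importlib.resources.files("llama_stack") / _BUILD_SCRIPTS[image_type]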
@@ -10,7 +10,7 @@ import re
 from pathlib import Path
 from typing import Any, Dict, Optional
 
-import pkg_resources
+import importlib.resources
 import yaml
 
 from termcolor import colored
@@ -190,14 +190,13 @@ async def construct_stack(
 
 
 def get_stack_run_config_from_template(template: str) -> StackRunConfig:
-    template_path = pkg_resources.resource_filename(
-        "llama_stack", f"templates/{template}/run.yaml"
+    template_path = (
+        importlib.resources.files("llama_stack") / f"templates/{template}/run.yaml"
     )
 
-    if not Path(template_path).exists():
-        raise ValueError(f"Template '{template}' not found at {template_path}")
-
-    with open(template_path) as f:
-        run_config = yaml.safe_load(f)
+    with importlib.resources.as_file(template_path) as path:
+        if not path.exists():
+            raise ValueError(f"Template '{template}' not found at {template_path}")
+        run_config = yaml.safe_load(path.open())
 
     return StackRunConfig(**replace_env_vars(run_config))
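The subtle part of this last hunk is moving the existence check inside the as_file() block: resource_filename always returned a plain string path, whereas files() may point inside an archive, and only as_file() guarantees a path that Path.exists() can answer. A minimal sketch of loading a packaged template this way; the template name "ollama" is illustrative:

import importlib.resources

import yaml

resource = importlib.resources.files("llama_stack") / "templates/ollama/run.yaml"
with importlib.resources.as_file(resource) as path:
    if not path.exists():
        raise ValueError(f"Template not found at {path}")
    run_config = yaml.safe_load(path.read_text())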