Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-08 04:54:38 +00:00.
Commit c9f33d8f68 (parent 23fe353e4a): cli updates
5 changed files with 14 additions and 9 deletions
@@ -7,9 +7,10 @@ from huggingface_hub import snapshot_download
 from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError
 
 from toolchain.cli.subcommand import Subcommand
+from toolchain.utils import DEFAULT_DUMP_DIR
 
 
-DEFAULT_CHECKPOINT_DIR = f"{os.path.expanduser('~')}/.llama/checkpoints/"
+DEFAULT_CHECKPOINT_DIR = os.path.join(DEFAULT_DUMP_DIR, "checkpoints")
 
 
 class Download(Subcommand):
@@ -61,10 +62,8 @@ class Download(Subcommand):
     def _run_download_cmd(self, args: argparse.Namespace):
         model_name = args.repo_id.split("/")[-1]
 
         output_dir = Path(DEFAULT_CHECKPOINT_DIR) / model_name
         os.makedirs(output_dir, exist_ok=True)
-
-        output_dir = Path(output_dir) / model_name
         try:
             true_output_dir = snapshot_download(
                 args.repo_id,
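Taken together, the two hunks above make the download command resolve its output directory from DEFAULT_DUMP_DIR and create the per-model checkpoint directory in one step, dropping the extra `output_dir = Path(output_dir) / model_name` reassignment. A minimal sketch of that flow, assuming a stand-in value for DEFAULT_DUMP_DIR (the real constant lives in toolchain.utils and is not shown in this diff) and an assumed local_dir keyword for snapshot_download (the call's remaining arguments are truncated here):

    import os
    from pathlib import Path

    from huggingface_hub import snapshot_download
    from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError

    # Stand-in for toolchain.utils.DEFAULT_DUMP_DIR; the actual value is defined
    # elsewhere in the repo and is only an assumption here.
    DEFAULT_DUMP_DIR = os.path.expanduser("~/.llama")
    DEFAULT_CHECKPOINT_DIR = os.path.join(DEFAULT_DUMP_DIR, "checkpoints")

    def download(repo_id: str) -> str:
        # "org/Model-Name" -> "Model-Name"
        model_name = repo_id.split("/")[-1]

        # Build <dump dir>/checkpoints/<model name> directly and create it up front;
        # the diff removes the later reassignment that appended model_name again.
        output_dir = Path(DEFAULT_CHECKPOINT_DIR) / model_name
        os.makedirs(output_dir, exist_ok=True)

        try:
            # local_dir is an assumed keyword argument; the diff truncates the
            # real call after args.repo_id.
            true_output_dir = snapshot_download(repo_id, local_dir=str(output_dir))
        except (GatedRepoError, RepositoryNotFoundError) as e:
            raise SystemExit(f"Unable to download {repo_id}: {e}")
        return true_output_dir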
@@ -5,9 +5,11 @@ import textwrap
 from pathlib import Path
 
 from toolchain.cli.subcommand import Subcommand
+from toolchain.utils import DEFAULT_DUMP_DIR
+
 
-CONFIGS_BASE_DIR = f"{os.path.expanduser('~')}/.llama/configs/"
+CONFIGS_BASE_DIR = os.path.join(DEFAULT_DUMP_DIR, "configs")
 
 
 class InferenceConfigure(Subcommand):
     """Llama cli for configuring llama toolchain configs"""
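The configure hunk applies the same pattern: CONFIGS_BASE_DIR is now derived from the shared DEFAULT_DUMP_DIR rather than a second hard-coded ~/.llama path. A quick sketch of the resulting layout, again with an assumed stand-in value for DEFAULT_DUMP_DIR:

    import os

    # Assumed stand-in; the real constant comes from toolchain.utils.
    DEFAULT_DUMP_DIR = os.path.expanduser("~/.llama")

    # Both CLI subcommands now root their on-disk state under the same base dir.
    DEFAULT_CHECKPOINT_DIR = os.path.join(DEFAULT_DUMP_DIR, "checkpoints")  # model weights
    CONFIGS_BASE_DIR = os.path.join(DEFAULT_DUMP_DIR, "configs")            # generated configs

    print(DEFAULT_CHECKPOINT_DIR)  # e.g. /home/<user>/.llama/checkpoints
    print(CONFIGS_BASE_DIR)        # e.g. /home/<user>/.llama/configs

Changing DEFAULT_DUMP_DIR in one place now relocates both directories, which the old per-file f-strings did not allow.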
@@ -43,6 +43,7 @@ class InferenceStart(Subcommand):
             "--config",
             type=str,
             help="Path to config file",
+            default="inference"
         )
 
     def _run_inference_start_cmd(self, args: argparse.Namespace) -> None:
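The last hunk gives --config a default of "inference", so the start subcommand no longer requires an explicit config path. A small argparse sketch of that behavior (the parser wiring below is illustrative, not the repo's actual Subcommand setup):

    import argparse

    parser = argparse.ArgumentParser(prog="inference start")
    parser.add_argument(
        "--config",
        type=str,
        help="Path to config file",
        default="inference",  # the default added by this commit
    )

    args = parser.parse_args([])                 # no --config supplied
    assert args.config == "inference"            # falls back to the new default

    args = parser.parse_args(["--config", "custom.yaml"])
    assert args.config == "custom.yaml"          # explicit value still wins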