fix(cli): llama model prompt-format

Summary:
Fix ROOT_DIR resolution in the `llama model prompt-format` command (it was one directory level too shallow) and add a -l/--list flag to list all available models.

Test Plan:
Author: Eric Huang
Date:   2025-03-07 11:43:59 -08:00
Parent: bad12ee21f
Commit: d27d959a18


@@ -13,7 +13,7 @@ from llama_stack.cli.subcommand import Subcommand
 from llama_stack.cli.table import print_table
 from llama_stack.models.llama.datatypes import CoreModelId, ModelFamily, is_multimodal, model_family
 
-ROOT_DIR = Path(__file__).parent.parent
+ROOT_DIR = Path(__file__).parent.parent.parent
 
 
 class ModelPromptFormat(Subcommand):
@@ -44,6 +44,12 @@ class ModelPromptFormat(Subcommand):
             default="llama3_1",
             help="Model Family (llama3_1, llama3_X, etc.)",
         )
+        self.parser.add_argument(
+            "-l",
+            "--list",
+            action="store_true",
+            help="List all available models",
+        )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
         import importlib.resources
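
For context (not part of the commit): a minimal, self-contained sketch of the argparse pattern this diff adds, with a hypothetical handler that branches on the new flag. The SUPPORTED_FAMILIES list and the handler body are illustrative assumptions, not the actual llama-stack implementation; only the two add_argument calls mirror the code in the diff.

import argparse

# Hypothetical stand-in for the model families the real CLI knows about.
SUPPORTED_FAMILIES = ["llama3_1", "llama3_2", "llama3_3"]


def build_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(prog="llama model prompt-format")
    parser.add_argument(
        "-m",
        "--model-name",
        default="llama3_1",
        help="Model Family (llama3_1, llama3_X, etc.)",
    )
    # Mirrors the flag added in this commit: a boolean switch that
    # defaults to False and becomes True when -l/--list is passed.
    parser.add_argument(
        "-l",
        "--list",
        action="store_true",
        help="List all available models",
    )
    return parser


def main() -> None:
    args = build_parser().parse_args()
    if args.list:
        # Assumed behavior: print the known families and exit instead
        # of rendering a prompt format.
        for family in SUPPORTED_FAMILIES:
            print(family)
        return
    print(f"Showing prompt format for {args.model_name}")


if __name__ == "__main__":
    main()

With a handler along these lines, `llama model prompt-format --list` (or `-l`) would enumerate the available models rather than requiring the user to guess a valid -m value.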