From d27d959a18f54dc819e93655bf17861f6233c897 Mon Sep 17 00:00:00 2001
From: Eric Huang
Date: Fri, 7 Mar 2025 11:43:59 -0800
Subject: [PATCH] fix(cli): llama model prompt-format

Summary:
`ROOT_DIR` in the prompt-format CLI resolved one directory too shallow
(`parent.parent` instead of `parent.parent.parent`), so resource paths under
the package root could not be found. Fix the path depth and add a
`-l`/`--list` flag to list all available models.

Test Plan:
Run `llama model prompt-format -l` and `llama model prompt-format -m <model>`
and verify the model list prints and the prompt-format templates resolve.
---
 llama_stack/cli/model/prompt_format.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/llama_stack/cli/model/prompt_format.py b/llama_stack/cli/model/prompt_format.py
index 0cee94235..8058db461 100644
--- a/llama_stack/cli/model/prompt_format.py
+++ b/llama_stack/cli/model/prompt_format.py
@@ -13,7 +13,7 @@ from llama_stack.cli.subcommand import Subcommand
 from llama_stack.cli.table import print_table
 from llama_stack.models.llama.datatypes import CoreModelId, ModelFamily, is_multimodal, model_family
 
-ROOT_DIR = Path(__file__).parent.parent
+ROOT_DIR = Path(__file__).parent.parent.parent
 
 
 class ModelPromptFormat(Subcommand):
@@ -44,6 +44,12 @@ class ModelPromptFormat(Subcommand):
             default="llama3_1",
             help="Model Family (llama3_1, llama3_X, etc.)",
         )
+        self.parser.add_argument(
+            "-l",
+            "--list",
+            action="store_true",
+            help="List all available models",
+        )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
         import importlib.resources