diff --git a/llama_stack/cli/model/prompt_format.py b/llama_stack/cli/model/prompt_format.py
index 0cee94235..8058db461 100644
--- a/llama_stack/cli/model/prompt_format.py
+++ b/llama_stack/cli/model/prompt_format.py
@@ -13,7 +13,7 @@ from llama_stack.cli.subcommand import Subcommand
 from llama_stack.cli.table import print_table
 from llama_stack.models.llama.datatypes import CoreModelId, ModelFamily, is_multimodal, model_family
 
-ROOT_DIR = Path(__file__).parent.parent
+ROOT_DIR = Path(__file__).parent.parent.parent
 
 
 class ModelPromptFormat(Subcommand):
@@ -44,6 +44,12 @@ class ModelPromptFormat(Subcommand):
             default="llama3_1",
             help="Model Family (llama3_1, llama3_X, etc.)",
         )
+        self.parser.add_argument(
+            "-l",
+            "--list",
+            action="store_true",
+            help="List all available models",
+        )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
         import importlib.resources
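
Note: the hunk above only registers the new `-l`/`--list` flag; it does not show how `_run_model_template_cmd` consumes it. The following is a minimal, hypothetical sketch of one way the flag could be handled, assuming the handler short-circuits before any prompt-format lookup and that the listable names come from the `CoreModelId` enum already imported in this file. The helper name `list_available_models` is illustrative and not part of the diff.

# Hypothetical sketch (not part of the diff): print every known model
# identifier from the CoreModelId enum and return early when --list is set.
from llama_stack.models.llama.datatypes import CoreModelId

def list_available_models() -> None:
    # Each CoreModelId member's value is a user-facing model name string
    # (e.g. something a caller could pass via --model-name).
    for model_id in CoreModelId:
        print(model_id.value)

# Possible wiring inside _run_model_template_cmd:
#     if args.list:
#         list_available_models()
#         return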