From c02464b6356006744e80c1e8fb96a4d22b1392ba Mon Sep 17 00:00:00 2001
From: Alina Ryan <66272285+alinaryan@users.noreply.github.com>
Date: Thu, 13 Mar 2025 20:47:09 -0400
Subject: [PATCH] fix: Clarify `llama model prompt-format` help text (#1010)

# What does this PR do?

Updates the help text for the `llama model prompt-format` command to clarify that users should provide a specific model name (e.g., Llama3.1-8B, Llama3.2-11B-Vision), not a model family. Removes the `default` value for `--model-name` to prevent users from mistakenly thinking a model family name is acceptable. Adds guidance to run `llama model list` to view valid model names.

## Test Plan

Output of `llama model prompt-format -h` before:

```
(venv) alina@fedora:~/dev/llama/llama-stack$ llama model prompt-format -h
usage: llama model prompt-format [-h] [-m MODEL_NAME]

Show llama model message formats

options:
  -h, --help            show this help message and exit
  -m MODEL_NAME, --model-name MODEL_NAME
                        Model Family (llama3_1, llama3_X, etc.)

Example:
    llama model prompt-format

(venv) alina@fedora:~/dev/llama/llama-stack$ llama model prompt-format --model-name llama3_1
usage: llama model prompt-format [-h] [-m MODEL_NAME]
llama model prompt-format: error: llama3_1 is not a valid Model. Choose one from --
Llama3.1-8B
Llama3.1-70B
Llama3.1-405B
Llama3.1-8B-Instruct
Llama3.1-70B-Instruct
Llama3.1-405B-Instruct
Llama3.2-1B
Llama3.2-3B
Llama3.2-1B-Instruct
Llama3.2-3B-Instruct
Llama3.2-11B-Vision
Llama3.2-90B-Vision
Llama3.2-11B-Vision-Instruct
Llama3.2-90B-Vision-Instruct
```

Output of `llama model prompt-format -h` after:

```
(venv) alina@fedora:~/dev/llama/llama-stack$ llama model prompt-format -h
usage: llama model prompt-format [-h] [-m MODEL_NAME]

Show llama model message formats

options:
  -h, --help            show this help message and exit
  -m MODEL_NAME, --model-name MODEL_NAME
                        Example: Llama3.1-8B or Llama3.2-11B-Vision, etc
                        (Run `llama model list` to see a list of valid model names)

Example:
    llama model prompt-format
```

Signed-off-by: Alina Ryan
---
 llama_stack/cli/model/prompt_format.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/llama_stack/cli/model/prompt_format.py b/llama_stack/cli/model/prompt_format.py
index 8058db461..0d62dcc11 100644
--- a/llama_stack/cli/model/prompt_format.py
+++ b/llama_stack/cli/model/prompt_format.py
@@ -41,8 +41,8 @@ class ModelPromptFormat(Subcommand):
             "-m",
             "--model-name",
             type=str,
-            default="llama3_1",
-            help="Model Family (llama3_1, llama3_X, etc.)",
+            help="Example: Llama3.1-8B or Llama3.2-11B-Vision, etc\n"
+            "(Run `llama model list` to see a list of valid model names)",
         )
         self.parser.add_argument(
             "-l",
@@ -81,10 +81,16 @@ class ModelPromptFormat(Subcommand):
         try:
             model_id = CoreModelId(args.model_name)
         except ValueError:
-            self.parser.error(f"{args.model_name} is not a valid Model. Choose one from --\n{model_str}")
+            self.parser.error(
+                f"{args.model_name} is not a valid Model. Choose one from the list of valid models. "
+                f"Run `llama model list` to see the valid model names."
+            )
 
         if model_id not in supported_model_ids:
-            self.parser.error(f"{model_id} is not a valid Model. Choose one from --\n {model_str}")
+            self.parser.error(
+                f"{model_id} is not a valid Model. Choose one from the list of valid models. "
+                f"Run `llama model list` to see the valid model names."
+            )
 
         llama_3_1_file = ROOT_DIR / "models" / "llama" / "llama3_1" / "prompt_format.md"
         llama_3_2_text_file = ROOT_DIR / "models" / "llama" / "llama3_2" / "text_prompt_format.md"
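
For context on why dropping `default=` matters, here is a minimal standalone sketch, not the llama-stack code itself: the `DemoModelId` enum and its members are hypothetical stand-ins for `CoreModelId`. It shows how an argparse option with no default, validated against an enum of concrete model names, rejects a family name like `llama3_1` via `parser.error()`.

```python
import argparse
from enum import Enum


class DemoModelId(Enum):
    """Hypothetical stand-in for llama-stack's CoreModelId enum."""

    LLAMA3_1_8B = "Llama3.1-8B"
    LLAMA3_2_11B_VISION = "Llama3.2-11B-Vision"


parser = argparse.ArgumentParser(prog="llama model prompt-format")
parser.add_argument(
    "-m",
    "--model-name",
    type=str,
    # No default= here, so a model family name is never silently assumed;
    # the user must pass a concrete model name.
    help="Example: Llama3.1-8B or Llama3.2-11B-Vision, etc "
    "(Run `llama model list` to see a list of valid model names)",
)

args = parser.parse_args(["--model-name", "llama3_1"])
try:
    model_id = DemoModelId(args.model_name)  # raises ValueError for "llama3_1"
except ValueError:
    # parser.error() prints the usage line plus this message, then exits with status 2
    parser.error(
        f"{args.model_name} is not a valid Model. "
        f"Run `llama model list` to see the valid model names."
    )
```

Run as-is, this reproduces the shape of the error path shown in the test plan above: the usage line, the guidance message, and exit status 2.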