fix: Clarify llama model prompt-format help text

Updates the help text for the `llama model prompt-format` command
to clarify that users should provide a specific model name
(e.g., Llama3.1-8B, Llama3.2-11B-Vision), not a model family.
Removes the default value (`llama3_1`) for `--model-name` to prevent users from
mistakenly assuming a model family name is acceptable.
Adds guidance to run `llama model list` to view valid model names.

Signed-off-by: Alina Ryan <aliryan@redhat.com>
Alina Ryan 2025-02-07 13:10:00 -05:00
parent 10bda65b94
commit 04174bafa0


@@ -38,8 +38,8 @@ class ModelPromptFormat(Subcommand):
             "-m",
             "--model-name",
             type=str,
-            default="llama3_1",
-            help="Model Family (llama3_1, llama3_X, etc.)",
+            help="Example: Llama3.1-8B or Llama3.2-11B-Vision, etc\n"
+            "(Run `llama model list` to see a list of valid model names)",
         )

     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
@@ -53,10 +53,16 @@ class ModelPromptFormat(Subcommand):
         try:
             model_id = CoreModelId(args.model_name)
         except ValueError:
-            self.parser.error(f"{args.model_name} is not a valid Model. Choose one from --\n{model_str}")
+            self.parser.error(
+                f"{args.model_name} is not a valid Model. Choose one from the list of valid models. "
+                f"Run `llama model list` to see the valid model names."
+            )

         if model_id not in supported_model_ids:
-            self.parser.error(f"{model_id} is not a valid Model. Choose one from --\n {model_str}")
+            self.parser.error(
+                f"{model_id} is not a valid Model. Choose one from the list of valid models. "
+                f"Run `llama model list` to see the valid model names."
+            )

         llama_3_1_file = importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
         llama_3_2_text_file = importlib.resources.files("llama_models") / "llama3_2/text_prompt_format.md"
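
For context, a minimal, self-contained sketch (not the actual CLI wiring) of the behavior this change produces. The two-value `CoreModelId` enum and the `prog` name below are stand-ins chosen for illustration; the real enum in `llama_models` covers the full set of concrete model names. With the default removed, a family name such as `llama3_1` now fails the `CoreModelId` lookup and surfaces the new guidance instead of being silently accepted.

# Illustrative sketch only; stand-in enum, not the real llama_models.CoreModelId.
import argparse
from enum import Enum


class CoreModelId(Enum):
    # Illustrative subset; the real enum lists every concrete model name.
    llama3_1_8b = "Llama3.1-8B"
    llama3_2_11b_vision = "Llama3.2-11B-Vision"


parser = argparse.ArgumentParser(prog="llama model prompt-format")
parser.add_argument(
    "-m",
    "--model-name",
    type=str,
    help="Example: Llama3.1-8B or Llama3.2-11B-Vision, etc\n"
    "(Run `llama model list` to see a list of valid model names)",
)

# A family name such as "llama3_1" is no longer supplied via a default;
# it fails the enum lookup and triggers the new error message.
args = parser.parse_args(["--model-name", "llama3_1"])
try:
    model_id = CoreModelId(args.model_name)
except ValueError:
    parser.error(
        f"{args.model_name} is not a valid Model. Choose one from the list of valid models. "
        f"Run `llama model list` to see the valid model names."
    )

Running the sketch prints the usage line and the new error message to stderr and exits with status 2, mirroring what `llama model prompt-format -m llama3_1` would now do.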