From 734f59d3b84091a9f21396eb404f050fe36e9232 Mon Sep 17 00:00:00 2001
From: AshleyT3
Date: Thu, 3 Oct 2024 23:24:47 -0700
Subject: [PATCH] Check that the model is found before use. (#182)

---
 llama_stack/cli/download.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/llama_stack/cli/download.py b/llama_stack/cli/download.py
index 658ed40e8..4d0966bb2 100644
--- a/llama_stack/cli/download.py
+++ b/llama_stack/cli/download.py
@@ -158,12 +158,11 @@ def run_download_cmd(args: argparse.Namespace, parser: argparse.ArgumentParser):
         info = prompt_guard_download_info()
     else:
         model = resolve_model(args.model_id)
+        if model is None:
+            parser.error(f"Model {args.model_id} not found")
+            return
         info = llama_meta_net_info(model)
 
-    if model is None:
-        parser.error(f"Model {args.model_id} not found")
-        return
-
     if args.source == "huggingface":
         _hf_download(model, args.hf_token, args.ignore_patterns, parser)
     else:
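
For context, a minimal, self-contained sketch of the control flow this patch enforces. The resolver and info helpers below are hypothetical stand-ins, not the real llama_stack implementations; the point is only that resolve_model() can return None, so the CLI has to report the unknown model via parser.error() before anything dereferences the result:

    import argparse

    def resolve_model(model_id):
        # Hypothetical stand-in: returns a descriptor for known models, else None.
        return {"name": model_id} if model_id in {"Llama3.1-8B", "Llama3.2-1B"} else None

    def llama_meta_net_info(model):
        # Hypothetical stand-in: would fail if handed None, hence the check before it.
        return f"download manifest for {model['name']}"

    def run_download_cmd(args, parser):
        model = resolve_model(args.model_id)
        if model is None:
            # argparse prints the usage string plus the message and exits with status 2.
            parser.error(f"Model {args.model_id} not found")
            return
        info = llama_meta_net_info(model)
        print(info)

    if __name__ == "__main__":
        parser = argparse.ArgumentParser(prog="llama download")
        parser.add_argument("--model-id", required=True)
        run_download_cmd(parser.parse_args(), parser)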