fix: update dangling references to llama download command

After removing model management CLI (#3700), update remaining references
to the old `llama download` command to use `huggingface-cli download` instead.

Changes:
- Update error messages in meta_reference and torchtune providers
- Update post-training notebook to use huggingface-cli
- Fix typo: "you model" -> "your model"
This commit is contained in:
Ashwin Bharambe 2025-10-09 17:43:07 -07:00
parent 145b2bcf25
commit 59947506a3
3 changed files with 6369 additions and 6410 deletions

File diff suppressed because one or more lines are too long

View file

@@ -18,7 +18,7 @@ def model_checkpoint_dir(model_id) -> str:
     assert checkpoint_dir.exists(), (
         f"Could not find checkpoints in: {model_local_dir(model_id)}. "
-        f"If you try to use the native llama model, Please download model using `llama download --model-id {model_id}`"
-        f"Otherwise, please save you model checkpoint under {model_local_dir(model_id)}"
+        f"If you try to use the native llama model, please download the model using `llama-model download --source meta --model-id {model_id}` (see https://github.com/meta-llama/llama-models). "
+        f"Otherwise, please save your model checkpoint under {model_local_dir(model_id)}"
     )
     return str(checkpoint_dir)

View file

@@ -104,9 +104,10 @@ class LoraFinetuningSingleDevice:
         if not any(p.exists() for p in paths):
             checkpoint_dir = checkpoint_dir / "original"
+        hf_repo = model.huggingface_repo or f"meta-llama/{model.descriptor()}"
         assert checkpoint_dir.exists(), (
             f"Could not find checkpoints in: {model_local_dir(model.descriptor())}. "
-            f"Please download model using `llama download --model-id {model.descriptor()}`"
+            f"Please download the model using `huggingface-cli download {hf_repo} --local-dir ~/.llama/{model.descriptor()}`"
         )
         return str(checkpoint_dir)