forked from phoenix-oss/llama-stack-mirror
Add a --manifest-file option to llama download
This commit is contained in:
parent b8fc4d4dee
commit 5e072d0780
4 changed files with 78 additions and 14 deletions
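For orientation, a plausible invocation of the new option is sketched below; only the llama download command and the --manifest-file flag come from the commit title, and the manifest path is a made-up placeholder rather than anything shown in this diff.

    llama download --manifest-file ./manifest.json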
@@ -28,16 +28,16 @@ from llama_models.llama3_1.api.datatypes import Message
 from llama_models.llama3_1.api.tokenizer import Tokenizer
 from llama_models.llama3_1.reference_impl.model import Transformer
 from llama_models.sku_list import resolve_model
+from termcolor import cprint
 
 from llama_toolchain.common.model_utils import model_local_dir
 from llama_toolchain.inference.api import QuantizationType
-from termcolor import cprint
 
 from .config import MetaReferenceImplConfig
 
 
 def model_checkpoint_dir(model) -> str:
-    checkpoint_dir = Path(model_local_dir(model))
+    checkpoint_dir = Path(model_local_dir(model.descriptor()))
     if not Path(checkpoint_dir / "consolidated.00.pth").exists():
         checkpoint_dir = checkpoint_dir / "original"
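For context, a minimal sketch of how the helper reads after this change; only the lines visible in the hunk above are taken from the diff, while the final return statement is an assumption added so the snippet matches the declared -> str return type.

    from pathlib import Path

    from llama_toolchain.common.model_utils import model_local_dir


    def model_checkpoint_dir(model) -> str:
        # After this commit the local download directory is looked up by the
        # model's descriptor string instead of by the model object itself.
        checkpoint_dir = Path(model_local_dir(model.descriptor()))

        # If the consolidated weights are not at the top level of the
        # download, fall back to its "original" subdirectory.
        if not Path(checkpoint_dir / "consolidated.00.pth").exists():
            checkpoint_dir = checkpoint_dir / "original"

        # Assumption: return the resolved path as a string; the rest of the
        # function body is not shown in this hunk.
        return str(checkpoint_dir)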