#!/usr/bin/env python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# top-level folder for each specific model found within the models/ directory at
# the top-level of this source tree.

# Run this script:
# torchrun --nproc_per_node=8 scripts/generate_prompt_format.py meta-llama/Llama-4-17B-Omni-Instruct-BF16-16E ~/.llama/checkpoints/Llama-4-17B-Omni-Instruct-BF16-16E/ llama_stack.models.llama.llama4.prompts llama_stack/models/llama/llama4/prompt_format.md

import importlib
import os
from pathlib import Path

import fire

from llama_stack.models.llama.sku_list import resolve_model
from llama_stack.providers.inline.inference.meta_reference.config import (
    MetaReferenceInferenceConfig,
)
from llama_stack.providers.inline.inference.meta_reference.llama3.generation import (
    Llama3,
)
from llama_stack.providers.inline.inference.meta_reference.llama4.generation import (
    Llama4,
)

THIS_DIR = Path(__file__).parent.resolve()


def run_main(
    model_id: str,
    checkpoint_dir: str,
    module_name: str,
    output_path: str,
    llama4: bool = True,
):
    # The prompts module must expose a `usecases()` callable that returns a list
    # of use cases: either plain strings or objects with a `to_text(generator)` method.
    module = importlib.import_module(module_name)
    assert hasattr(module, "usecases"), f"Module {module_name} missing usecases function"

    llama_model = resolve_model(model_id)
    if not llama_model:
        raise ValueError(f"Model {model_id} not found")

    # Build the appropriate generator: Llama4 checkpoints are loaded directly,
    # while Llama3 checkpoints go through the meta-reference inference config.
    if not llama4:
        config = MetaReferenceInferenceConfig(
            model=model_id,
            max_seq_len=4096,
            max_batch_size=1,
            checkpoint_dir=checkpoint_dir,
        )
        generator = Llama3.build(
            config=config,
            model_id=model_id,
            llama_model=llama_model,
        )
    else:
        generator = Llama4.build(
            ckpt_dir=checkpoint_dir,
            max_seq_len=4096,
            max_batch_size=1,
        )

    # Render each use case to markdown text, echoing progress to stdout.
    use_cases = module.usecases()
    text = ""
    for u in use_cases:
        if isinstance(u, str):
            use_case_text = f"\n{u}\n"
        else:
            use_case_text = u.to_text(generator)
        text += use_case_text
        print(use_case_text)

    # `or "."` guards against a bare filename, where dirname() returns ""
    # and os.makedirs("") would raise FileNotFoundError.
    os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
    with open(output_path, "w") as f:
        f.write(text)


def main():
    fire.Fire(run_main)


if __name__ == "__main__":
    main()