diff --git a/.gitignore b/.gitignore
index f024dc9fc..321e946a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 __pycache__
 dist
 *.egg-info
+dev_requirements.txt
diff --git a/llama_toolchain/cli/llama.py b/llama_toolchain/cli/llama.py
index 0d2545adf..52a98cfe1 100644
--- a/llama_toolchain/cli/llama.py
+++ b/llama_toolchain/cli/llama.py
@@ -2,6 +2,7 @@ import argparse
 
 from llama_toolchain.cli.download import Download
 from llama_toolchain.cli.inference.inference import InferenceParser
+from llama_toolchain.cli.model.model import ModelParser
 
 
 class LlamaCLIParser:
@@ -22,6 +23,7 @@ class LlamaCLIParser:
         # Add sub-commands
         Download.create(subparsers)
         InferenceParser.create(subparsers)
+        ModelParser.create(subparsers)
 
         # Import sub-commands from agentic_system if they exist
         try:
diff --git a/llama_toolchain/cli/model/__init__.py b/llama_toolchain/cli/model/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/llama_toolchain/cli/model/model.py b/llama_toolchain/cli/model/model.py
new file mode 100644
index 000000000..6f9e2a2b3
--- /dev/null
+++ b/llama_toolchain/cli/model/model.py
@@ -0,0 +1,29 @@
+import argparse
+import textwrap
+
+from llama_toolchain.cli.model.template import ModelTemplate
+from llama_toolchain.cli.subcommand import Subcommand
+
+
+class ModelParser(Subcommand):
+    """Llama CLI for model interface APIs"""
+
+    def __init__(self, subparsers: argparse._SubParsersAction):
+        super().__init__()
+        self.parser = subparsers.add_parser(
+            "model",
+            prog="llama model",
+            description="Describe llama model interfaces",
+            epilog=textwrap.dedent(
+                """
+                Example:
+                    llama model
+                """
+            ),
+        )
+
+        subparsers = self.parser.add_subparsers(title="model_subcommands")
+
+        # Add sub-commands
+        # ModelDescribe.create(subparsers)
+        ModelTemplate.create(subparsers)
diff --git a/llama_toolchain/cli/model/template.py b/llama_toolchain/cli/model/template.py
new file mode 100644
index 000000000..bdfe04bf7
--- /dev/null
+++ b/llama_toolchain/cli/model/template.py
@@ -0,0 +1,45 @@
+import argparse
+import textwrap
+
+from llama_toolchain.cli.subcommand import Subcommand
+from llama_models.llama3_1.api.interface import render_jinja_template, list_jinja_templates
+
+
+class ModelTemplate(Subcommand):
+    """Llama model CLI for describing a model template (message formats)"""
+
+    def __init__(self, subparsers: argparse._SubParsersAction):
+        super().__init__()
+        self.parser = subparsers.add_parser(
+            "template",
+            prog="llama model template",
+            description="Show llama model message formats",
+            epilog=textwrap.dedent(
+                """
+                Example:
+                    llama model template
+                """
+            ),
+            formatter_class=argparse.RawTextHelpFormatter,
+        )
+        self._add_arguments()
+        self.parser.set_defaults(func=self._run_model_template_cmd)
+
+    def _add_arguments(self):
+        self.parser.add_argument(
+            "sku",
+            type=str,
+            help="Model SKU",
+        )
+        self.parser.add_argument(
+            "--template",
+            type=str,
+            help="Usecase template name (system_message, user_message, assistant_message, tool_message)...",
+            required=False,
+        )
+
+    def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
+        if args.template:
+            render_jinja_template(args.template)
+        else:
+            list_jinja_templates()
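
Note: the new files follow the repository's Subcommand pattern, where each subcommand registers its own argparse parser and attaches its handler via set_defaults(func=...), presumably so the top-level parser can dispatch with args.func(args). Below is a minimal, self-contained sketch of that dispatch pattern; the Greet class and main() function are illustrative stand-ins, not code from this diff or from llama_toolchain.

import argparse


class Greet:
    """Illustrative subcommand, mirroring how ModelTemplate registers itself."""

    def __init__(self, subparsers: argparse._SubParsersAction):
        self.parser = subparsers.add_parser("greet", description="Print a greeting")
        self.parser.add_argument("name", type=str, help="Who to greet")
        # Each subcommand attaches its own handler, as ModelTemplate does with
        # set_defaults(func=self._run_model_template_cmd).
        self.parser.set_defaults(func=self._run_greet_cmd)

    @classmethod
    def create(cls, subparsers: argparse._SubParsersAction):
        return cls(subparsers)

    def _run_greet_cmd(self, args: argparse.Namespace) -> None:
        print(f"Hello, {args.name}!")


def main():
    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers(title="subcommands")
    Greet.create(subparsers)  # mirrors ModelParser.create(subparsers) in llama.py
    args = parser.parse_args()
    if not hasattr(args, "func"):
        parser.print_help()  # no subcommand was given
        return
    args.func(args)  # dispatch to whatever handler the chosen subcommand registered


if __name__ == "__main__":
    main()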