mirror of https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 12:07:34 +00:00

local imports for faster cli

This commit is contained in:
parent af4710c959
commit 67229f23a4

9 changed files with 44 additions and 47 deletions
@@ -7,14 +7,9 @@
 import argparse
 import textwrap
 
-from llama_models.llama3_1.api.interface import (
-    list_jinja_templates,
-    render_jinja_template,
-)
 from termcolor import colored
 
 from llama_toolchain.cli.subcommand import Subcommand
-from llama_toolchain.cli.table import print_table
 
 
 class ModelTemplate(Subcommand):
@@ -53,6 +48,12 @@ class ModelTemplate(Subcommand):
         )
 
     def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
+        from llama_models.llama3_1.api.interface import (
+            list_jinja_templates,
+            render_jinja_template,
+        )
+        from llama_toolchain.cli.table import print_table
+
        if args.name:
            template, tokens_info = render_jinja_template(args.name)
            rendered = ""
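For context, the change follows the usual deferred-import pattern for CLI startup time: modules needed by only one subcommand are imported inside that subcommand's handler, so printing --help or running an unrelated subcommand never pays their import cost. Below is a minimal, self-contained sketch of the same idea using only the standard library; the names (example-cli, run_render) and the use of json as a stand-in dependency are illustrative, not part of llama_toolchain.

import argparse


def run_render(args: argparse.Namespace) -> None:
    # Deferred import: the cost is only paid when this subcommand actually
    # runs, not when the CLI starts up or parses --help. `json` stands in
    # for a heavyweight dependency here.
    import json

    print(json.dumps({"template": args.name}))


def main() -> None:
    parser = argparse.ArgumentParser(prog="example-cli")
    subparsers = parser.add_subparsers(dest="command", required=True)

    render = subparsers.add_parser("render", help="render a template")
    render.add_argument("--name", required=True)
    render.set_defaults(func=run_render)

    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()

The commit applies the same idea by moving the llama_models.llama3_1.api.interface and print_table imports from module scope into _run_model_template_cmd, as shown in the diff above.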