fix: resolve template name to config path in llama stack run (#2361)
# What does this PR do?

This PR fixes a bug where running a known template by name using:

`llama stack run ollama`

would fail with the following error:

`ValueError: Config file ollama does not exist`

Closes #2291

## Test Plan

`llama stack run ollama` should work.
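For background, the `config` positional was previously treated strictly as a path to an existing file, so a bare template name like `ollama` could not be found on disk. The following is a minimal sketch of the resolution idea only; the `resolve_run_config` helper and the `~/.llama/distributions/<name>/run.yaml` layout are illustrative assumptions, not the repository's actual code.

```python
from pathlib import Path

# Hypothetical location for per-template run configs (assumption for illustration).
TEMPLATE_DIR = Path.home() / ".llama" / "distributions"


def resolve_run_config(config_or_template: str) -> Path:
    """Treat the argument as a config path if it exists, otherwise as a template name."""
    candidate = Path(config_or_template)
    if candidate.is_file():
        return candidate

    # Fall back to a known-template lookup (hypothetical layout).
    template_config = TEMPLATE_DIR / config_or_template / "run.yaml"
    if template_config.is_file():
        return template_config

    raise ValueError(f"Config file {config_or_template} does not exist")
```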
Parent: cba55808ab
Commit: c70ca8344f

1 changed file with 6 additions and 2 deletions
@@ -35,7 +35,8 @@ class StackRun(Subcommand):
             "config",
             type=str,
             nargs="?",  # Make it optional
-            help="Path to config file to use for the run. Required for venv and conda environments.",
+            metavar="config | template",
+            help="Path to config file to use for the run or name of known template (`llama stack list` for a list).",
         )
         self.parser.add_argument(
             "--port",
@@ -154,7 +155,10 @@ class StackRun(Subcommand):
                 # func=<bound method StackRun._run_stack_run_cmd of <llama_stack.cli.stack.run.StackRun object at 0x10484b010>>
                 if callable(getattr(args, arg)):
                     continue
-                setattr(server_args, arg, getattr(args, arg))
+                if arg == "config" and template_name:
+                    server_args.config = str(config_file)
+                else:
+                    setattr(server_args, arg, getattr(args, arg))
 
             # Run the server
             server_main(server_args)
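For context, here is a standalone sketch of the copy-and-override pattern in the second hunk. The surrounding loop header, the `Namespace` contents, and the resolved `config_file` value are illustrative assumptions, not code taken verbatim from the repository.

```python
from argparse import Namespace
from pathlib import Path

# Illustrative inputs: in the real command these come from argparse and from
# resolving the template name earlier in the method.
args = Namespace(config="ollama", port=8321, func=lambda _: None)
template_name = "ollama"
config_file = Path("/tmp/ollama-run.yaml")  # assumed resolved template config

server_args = Namespace()
for arg in vars(args):
    if callable(getattr(args, arg)):
        # Skip argparse bookkeeping such as the bound `func` callable.
        continue
    if arg == "config" and template_name:
        # The raw `config` value may be just a template name, so hand the
        # server the resolved config path instead.
        server_args.config = str(config_file)
    else:
        setattr(server_args, arg, getattr(args, arg))

print(server_args.config)  # /tmp/ollama-run.yaml
print(server_args.port)    # 8321
```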