Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-01 16:24:44 +00:00)
Make run yaml optional so dockers can start with just --env
parent 1d8d0593af
commit adbb605a50
2 changed files with 28 additions and 3 deletions
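The net effect: a distribution container no longer needs a llamastack-run.yaml mounted in to boot. The build script's ENTRYPOINT (first hunk below) passes --template for the distribution the image was built from, and the server (remaining hunks, in the llama_stack.distribution.server.server module) falls back to that template's bundled run.yaml when no --yaml-config is given; environment-specific values are then supplied with --env, per the commit title.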
@@ -122,7 +122,7 @@ add_to_docker <<EOF
 # This would be good in production but for debugging flexibility lets not add it right now
 # We need a more solid production ready entrypoint.sh anyway
 #
-ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server"]
+ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "$build_name"]
 
 EOF
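Since the add_to_docker heredoc delimiter is unquoted, $build_name is expanded when the build script runs, so each image bakes in the name of the template it was built from; a container built from, for example, the tgi template would effectively start with python -m llama_stack.distribution.server.server --template tgi.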
@@ -16,6 +16,7 @@ import traceback
 import warnings
 
 from contextlib import asynccontextmanager
+from pathlib import Path
 from ssl import SSLError
 from typing import Any, Dict, Optional
 
@@ -49,6 +50,9 @@ from llama_stack.distribution.stack import (
 from .endpoints import get_all_api_endpoints
 
 
+REPO_ROOT = Path(__file__).parent.parent.parent.parent
+
+
 def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
     log = file if hasattr(file, "write") else sys.stderr
     traceback.print_stack(file=log)
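REPO_ROOT climbs four directories up from this file (the server module lives under llama_stack/distribution/server/), which lands on the repository checkout root; the template fallback below builds its run.yaml path relative to it.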
@@ -279,9 +283,12 @@ def main():
     parser = argparse.ArgumentParser(description="Start the LlamaStack server.")
     parser.add_argument(
         "--yaml-config",
-        default="llamastack-run.yaml",
         help="Path to YAML configuration file",
     )
+    parser.add_argument(
+        "--template",
+        help="One of the template names in llama_stack/templates (e.g., tgi, fireworks, remote-vllm, etc.)",
+    )
     parser.add_argument("--port", type=int, default=5000, help="Port to listen on")
     parser.add_argument(
         "--disable-ipv6", action="store_true", help="Whether to disable IPv6 support"
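With the default removed, args.yaml_config is None unless the flag is passed explicitly, and --template is likewise optional, so the resolution logic in the next hunk decides which run.yaml to load. For example, python -m llama_stack.distribution.server.server --template fireworks --port 5000 would pick up the fireworks template's bundled config.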
@@ -303,10 +310,28 @@ def main():
         print(f"Error: {str(e)}")
         sys.exit(1)
 
-    with open(args.yaml_config, "r") as fp:
+    if args.yaml_config:
+        # if the user provided a config file, use it, even if template was specified
+        config_file = Path(args.yaml_config)
+        if not config_file.exists():
+            raise ValueError(f"Config file {config_file} does not exist")
+
+    elif args.template:
+        config_file = (
+            Path(REPO_ROOT) / "llama_stack" / "templates" / args.template / "run.yaml"
+        )
+        if not config_file.exists():
+            raise ValueError(f"Template {args.template} does not exist")
+    else:
+        raise ValueError("Either --yaml-config or --template must be provided")
+
+    with open(config_file, "r") as fp:
         config = replace_env_vars(yaml.safe_load(fp))
         config = StackRunConfig(**config)
+
+    print(f"Using config file: {config_file}")
+    print(f"Config: {yaml.dump(config.model_dump(), indent=2)}")
 
     app = FastAPI()
 
     try:
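Taken together, the precedence is: an explicit --yaml-config always wins (even if --template is also given), otherwise the template's run.yaml under llama_stack/templates/<name>/ is used, and if neither flag is present the server refuses to start. A minimal sketch of that resolution order, with resolve_run_config as a hypothetical helper name and repo_root standing in for REPO_ROOT:

    from pathlib import Path
    from typing import Optional

    def resolve_run_config(repo_root: Path, yaml_config: Optional[str], template: Optional[str]) -> Path:
        # An explicit config file takes priority, even if a template was also given.
        if yaml_config:
            config_file = Path(yaml_config)
            if not config_file.exists():
                raise ValueError(f"Config file {config_file} does not exist")
            return config_file
        # Otherwise fall back to the template's run.yaml bundled in the source tree.
        if template:
            config_file = repo_root / "llama_stack" / "templates" / template / "run.yaml"
            if not config_file.exists():
                raise ValueError(f"Template {template} does not exist")
            return config_file
        # With neither flag there is nothing to load.
        raise ValueError("Either --yaml-config or --template must be provided")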