mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-27 18:50:41 +00:00
fix: Use CONDA_DEFAULT_ENV presence as a flag to use conda mode (#1555)
# What does this PR do? This is the second attempt to switch to system packages by default. Now with a hack to detect a conda environment - in which case the conda image-type is used. Note: Conda will only be used when --image-name is unset *and* CONDA_DEFAULT_ENV is set. This means that users without conda will correctly fall back to using system packages when no --image-* arguments are passed at all. [//]: # (If resolving an issue, uncomment and update the line below) [//]: # (Closes #[issue-number]) ## Test Plan Uses virtualenv: ``` $ llama stack build --template ollama --image-type venv $ llama stack run --image-type venv ~/.llama/distributions/ollama/ollama-run.yaml [...] Using virtual environment: /home/ec2-user/src/llama-stack/schedule/.local [...] ``` Uses system packages (virtualenv already initialized): ``` $ llama stack run ~/.llama/distributions/ollama/ollama-run.yaml [...] INFO 2025-03-27 20:46:22,882 llama_stack.cli.stack.run:142 server: No image type or image name provided. Assuming environment packages. [...] ``` Attempt to run from environment packages without the necessary packages installed: ``` $ python -m venv barebones $ . ./barebones/bin/activate $ pip install -e . # to install llama command $ llama stack run ~/.llama/distributions/ollama/ollama-run.yaml [...] ModuleNotFoundError: No module named 'fastapi' ``` ^ failed as expected because the environment doesn't have the necessary packages installed. Now install some packages in the new environment: ``` $ pip install fastapi opentelemetry-api opentelemetry-sdk opentelemetry-exporter-otlp aiosqlite ollama openai datasets faiss-cpu mcp autoevals $ llama stack run ~/.llama/distributions/ollama/ollama-run.yaml [...] Uvicorn running on http://['::', '0.0.0.0']:8321 (Press CTRL+C to quit) ``` Now see if setting CONDA_DEFAULT_ENV will change what happens by default: ``` $ export CONDA_DEFAULT_ENV=base $ llama stack run ~/.llama/distributions/ollama/ollama-run.yaml [...] 
Using conda environment: base Conda environment base does not exist. [...] ``` --------- Signed-off-by: Ihar Hrachyshka <ihar.hrachyshka@gmail.com>
This commit is contained in:
parent
b5c27f77ad
commit
18bac27d4e
6 changed files with 102 additions and 36 deletions
|
@ -21,6 +21,7 @@ from prompt_toolkit.completion import WordCompleter
|
|||
from prompt_toolkit.validation import Validator
|
||||
from termcolor import cprint
|
||||
|
||||
from llama_stack.cli.stack.utils import ImageType
|
||||
from llama_stack.cli.table import print_table
|
||||
from llama_stack.distribution.build import (
|
||||
SERVER_DEPENDENCIES,
|
||||
|
@ -62,10 +63,10 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
|
|||
if args.list_templates:
|
||||
return _run_template_list_cmd()
|
||||
|
||||
if args.image_type == "venv":
|
||||
if args.image_type == ImageType.VENV.value:
|
||||
current_venv = os.environ.get("VIRTUAL_ENV")
|
||||
image_name = args.image_name or current_venv
|
||||
elif args.image_type == "conda":
|
||||
elif args.image_type == ImageType.CONDA.value:
|
||||
current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
|
||||
image_name = args.image_name or current_conda_env
|
||||
else:
|
||||
|
@ -84,7 +85,7 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
|
|||
build_config.image_type = args.image_type
|
||||
else:
|
||||
cprint(
|
||||
f"Please specify a image-type (container | conda | venv) for {args.template}",
|
||||
f"Please specify a image-type ({' | '.join(e.value for e in ImageType)}) for {args.template}",
|
||||
color="red",
|
||||
)
|
||||
sys.exit(1)
|
||||
|
@ -98,15 +99,15 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
|
|||
)
|
||||
|
||||
image_type = prompt(
|
||||
"> Enter the image type you want your Llama Stack to be built as (container or conda or venv): ",
|
||||
f"> Enter the image type you want your Llama Stack to be built as ({' or '.join(e.value for e in ImageType)}): ",
|
||||
validator=Validator.from_callable(
|
||||
lambda x: x in ["container", "conda", "venv"],
|
||||
error_message="Invalid image type, please enter conda or container or venv",
|
||||
lambda x: x in [e.value for e in ImageType],
|
||||
error_message=f"Invalid image type, please enter {' or '.join(e.value for e in ImageType)}",
|
||||
),
|
||||
default="conda",
|
||||
default=ImageType.CONDA.value,
|
||||
)
|
||||
|
||||
if image_type == "conda":
|
||||
if image_type == ImageType.CONDA.value:
|
||||
if not image_name:
|
||||
cprint(
|
||||
f"No current conda environment detected or specified, will create a new conda environment with the name `llamastack-{name}`",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue