refactor: remove Conda support from Llama Stack (#2969)

# What does this PR do?
This PR removes Conda support from Llama Stack. Only the `venv` and `container` image types remain supported.

Closes #2539

## Test Plan
Commit a749d5f4a4 (parent f2eee4e417), authored by IAN MILLER on 2025-08-02 23:52:59 +01:00 and committed by GitHub.
44 changed files with 159 additions and 311 deletions


@@ -7,7 +7,6 @@
import importlib.resources
import logging
import sys
from pathlib import Path
from pydantic import BaseModel
from termcolor import cprint
@@ -106,7 +105,6 @@ def print_pip_install_help(config: BuildConfig):
def build_image(
build_config: BuildConfig,
build_file_path: Path,
image_name: str,
template_or_config: str,
run_config: str | None = None,
@@ -138,18 +136,7 @@ def build_image(
# build arguments
if run_config is not None:
args.extend(["--run-config", run_config])
elif build_config.image_type == LlamaStackImageType.CONDA.value:
script = str(importlib.resources.files("llama_stack") / "core/build_conda_env.sh")
args = [
script,
"--env-name",
str(image_name),
"--build-file-path",
str(build_file_path),
"--normal-deps",
" ".join(normal_deps),
]
elif build_config.image_type == LlamaStackImageType.VENV.value:
else:
script = str(importlib.resources.files("llama_stack") / "core/build_venv.sh")
args = [
script,

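Since the rendered hunk above has lost its add/remove markers, here is a minimal sketch of the dispatch that survives once the Conda branch is deleted. `build_script_args` is a hypothetical helper and the script path is a placeholder; the real `build_image` resolves the script via `importlib.resources` and passes more arguments.

```python
def build_script_args(run_config: str | None, normal_deps: list[str]) -> list[str]:
    """Sketch: with Conda gone, anything that is not a --run-config
    invocation falls through to the venv build script."""
    args: list[str] = []
    if run_config is not None:
        args.extend(["--run-config", run_config])
    else:
        # The former `elif image_type == CONDA` branch is removed; the venv
        # branch that used to follow it is now a plain `else`.
        script = "core/build_venv.sh"  # placeholder; the real code resolves this path
        args = [script, "--normal-deps", " ".join(normal_deps)]
    return args


print(build_script_args(None, ["pydantic", "termcolor"]))
```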

@@ -6,9 +6,6 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
# TODO: combine this with build_conda_env.sh since it is almost identical
# the only difference is that we don't do any conda-specific setup
LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-}
TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
@@ -95,6 +92,8 @@ if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR"
fi
ENVNAME=""
# pre-run checks to make sure we can proceed with the installation
pre_run_checks() {
local env_name="$1"


@@ -7,12 +7,10 @@
# the root directory of this source tree.
cleanup() {
envname="$1"
set +x
echo "Cleaning up..."
conda deactivate
conda env remove --name "$envname" -y
# For venv environments, no special cleanup is needed
# This function exists to avoid "function not found" errors
local env_name="$1"
echo "Cleanup called for environment: $env_name"
}
handle_int() {
@@ -31,19 +29,7 @@ handle_exit() {
fi
}
setup_cleanup_handlers() {
trap handle_int INT
trap handle_exit EXIT
if is_command_available conda; then
__conda_setup="$('conda' 'shell.bash' 'hook' 2>/dev/null)"
eval "$__conda_setup"
conda deactivate
else
echo "conda is not available"
exit 1
fi
}
# check if a command is present
is_command_available() {


@@ -432,8 +432,8 @@ class BuildConfig(BaseModel):
distribution_spec: DistributionSpec = Field(description="The distribution spec to build including API providers. ")
image_type: str = Field(
default="conda",
description="Type of package to build (conda | container | venv)",
default="venv",
description="Type of package to build (container | venv)",
)
image_name: str | None = Field(
default=None,

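To illustrate the new default, here is a hedged, stripped-down stand-in for `BuildConfig` containing only the fields shown in this hunk (the real model defines more, including `distribution_spec`):

```python
from pydantic import BaseModel, Field


class BuildConfig(BaseModel):
    # Stand-in with only the fields touched by this PR.
    image_type: str = Field(
        default="venv",
        description="Type of package to build (container | venv)",
    )
    image_name: str | None = Field(default=None)


# A config that omits image_type now produces a venv build, not a conda one.
print(BuildConfig().image_type)  # -> "venv"
```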

@@ -40,7 +40,6 @@ port="$1"
shift
SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
source "$SCRIPT_DIR/common.sh"
# Initialize variables
yaml_config=""
@@ -75,9 +74,9 @@ while [[ $# -gt 0 ]]; do
esac
done
# Check if yaml_config is required based on env_type
if [[ "$env_type" == "venv" || "$env_type" == "conda" ]] && [ -z "$yaml_config" ]; then
echo -e "${RED}Error: --config is required for venv and conda environments${NC}" >&2
# Check if yaml_config is required
if [[ "$env_type" == "venv" ]] && [ -z "$yaml_config" ]; then
echo -e "${RED}Error: --config is required for venv environment${NC}" >&2
exit 1
fi
@@ -101,19 +100,14 @@ case "$env_type" in
source "$env_path_or_name/bin/activate"
fi
;;
"conda")
if ! is_command_available conda; then
echo -e "${RED}Error: conda not found" >&2
exit 1
fi
eval "$(conda shell.bash hook)"
conda deactivate && conda activate "$env_path_or_name"
PYTHON_BINARY="$CONDA_PREFIX/bin/python"
;;
*)
# Handle unsupported env_types here
echo -e "${RED}Error: Unsupported environment type '$env_type'. Only 'venv' is supported.${NC}" >&2
exit 1
;;
esac
if [[ "$env_type" == "venv" || "$env_type" == "conda" ]]; then
if [[ "$env_type" == "venv" ]]; then
set -x
if [ -n "$yaml_config" ]; then


@@ -9,7 +9,7 @@
1. Start up Llama Stack API server. More details [here](https://llama-stack.readthedocs.io/en/latest/getting_started/index.html).
```
llama stack build --template together --image-type conda
llama stack build --template together --image-type venv
llama stack run together
```


@@ -15,69 +15,21 @@ from termcolor import cprint
log = logging.getLogger(__name__)
import importlib
import json
from pathlib import Path
from llama_stack.core.utils.image_types import LlamaStackImageType
def formulate_run_args(image_type: str, image_name: str) -> list[str]:
env_name = ""
def formulate_run_args(image_type: str, image_name: str) -> list:
# Only venv is supported now
current_venv = os.environ.get("VIRTUAL_ENV")
env_name = image_name or current_venv
if not env_name:
cprint(
"No current virtual environment detected, please specify a virtual environment name with --image-name",
color="red",
file=sys.stderr,
)
return []
if image_type == LlamaStackImageType.CONDA.value:
current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
env_name = image_name or current_conda_env
if not env_name:
cprint(
"No current conda environment detected, please specify a conda environment name with --image-name",
color="red",
file=sys.stderr,
)
return
def get_conda_prefix(env_name):
# Conda "base" environment does not end with "base" in the
# prefix, so should be handled separately.
if env_name == "base":
return os.environ.get("CONDA_PREFIX")
# Get conda environments info
conda_env_info = json.loads(subprocess.check_output(["conda", "info", "--envs", "--json"]).decode())
envs = conda_env_info["envs"]
for envpath in envs:
if os.path.basename(envpath) == env_name:
return envpath
return None
cprint(f"Using conda environment: {env_name}", color="green", file=sys.stderr)
conda_prefix = get_conda_prefix(env_name)
if not conda_prefix:
cprint(
f"Conda environment {env_name} does not exist.",
color="red",
file=sys.stderr,
)
return
build_file = Path(conda_prefix) / "llamastack-build.yaml"
if not build_file.exists():
cprint(
f"Build file {build_file} does not exist.\n\nPlease run `llama stack build` or specify the correct conda environment name with --image-name",
color="red",
file=sys.stderr,
)
return
else:
# else must be venv since that is the only valid option left.
current_venv = os.environ.get("VIRTUAL_ENV")
env_name = image_name or current_venv
if not env_name:
cprint(
"No current virtual environment detected, please specify a virtual environment name with --image-name",
color="red",
file=sys.stderr,
)
return
cprint(f"Using virtual environment: {env_name}", file=sys.stderr)
cprint(f"Using virtual environment: {env_name}", file=sys.stderr)
script = importlib.resources.files("llama_stack") / "core/start_stack.sh"
run_args = [
@@ -93,7 +45,8 @@ def in_notebook():
try:
from IPython import get_ipython
if "IPKernelApp" not in get_ipython().config: # pragma: no cover
ipython = get_ipython()
if ipython is None or "IPKernelApp" not in ipython.config: # pragma: no cover
return False
except ImportError:
return False

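The venv-only resolution that replaces the long Conda lookup can be summarized with the following self-contained sketch; `resolve_env_name` is a hypothetical helper for illustration (the real `formulate_run_args` reports errors with `cprint` to stderr and goes on to assemble the `start_stack.sh` arguments). The second hunk in the same file simply makes `in_notebook` tolerate `get_ipython()` returning `None`.

```python
import os


def resolve_env_name(image_name: str | None) -> str | None:
    """Sketch of the venv-only logic: prefer an explicit --image-name,
    fall back to the active virtual environment, otherwise give up."""
    env_name = image_name or os.environ.get("VIRTUAL_ENV")
    if not env_name:
        print(
            "No current virtual environment detected, "
            "please specify a virtual environment name with --image-name"
        )
        return None
    print(f"Using virtual environment: {env_name}")
    return env_name
```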

@@ -9,5 +9,4 @@ import enum
class LlamaStackImageType(enum.Enum):
CONTAINER = "container"
CONDA = "conda"
VENV = "venv"
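For reference, a tiny example of what the trimmed enum leaves callers to branch on (a local mirror of the class above, not an import from the package):

```python
import enum


class LlamaStackImageType(enum.Enum):
    # CONDA is gone; only these two members remain.
    CONTAINER = "container"
    VENV = "venv"


print([t.value for t in LlamaStackImageType])  # ['container', 'venv']
```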