From d797f9aec11f316501c1aa0923a676a3e30b9b89 Mon Sep 17 00:00:00 2001
From: Rohan Awhad <30470101+RohanAwhad@users.noreply.github.com>
Date: Mon, 23 Jun 2025 23:38:08 -0400
Subject: [PATCH] fix: #2495 FileNotFound Err in container image (#2498)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# What does this PR do?
Closes #2495

Changes:
- Delay the `COPY run.yaml` step in the container image until after external provider handling
- Split the check for `external_providers_dir` into "non-empty" and "directory exists"

## Test Plan
0. Create and activate a venv
1. Create a `simple_build.yaml`:
```yaml
version: '2'
distribution_spec:
  providers:
    inference:
      - remote::openai
image_type: container
image_name: openai-stack
```

2. Run llama stack build:
```bash
llama stack build --config simple_build.yaml
```

3. Run the docker container:
```bash
docker run \
  -p 8321:8321 \
  -e OPENAI_API_KEY=$OPENAI_API_KEY \
  openai_stack:0.2.12
```

This should show that the server is running:
```
INFO 2025-06-23 19:07:57,832 llama_stack.distribution.distribution:151 core: Loading external providers from /.llama/providers.d
INFO 2025-06-23 19:07:59,324 __main__:572 server: Listening on ['::', '0.0.0.0']:8321
INFO:     Started server process [1]
INFO:     Waiting for application startup.
INFO 2025-06-23 19:07:59,336 __main__:156 server: Starting up
INFO:     Application startup complete.
INFO:     Uvicorn running on http://['::', '0.0.0.0']:8321 (Press CTRL+C to quit)
```

Notice the first line:
```
Loading external providers from /.llama/providers.d
```
This is expected behaviour.

Co-authored-by: Rohan Awhad
---
 llama_stack/distribution/build_container.sh | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/llama_stack/distribution/build_container.sh b/llama_stack/distribution/build_container.sh
index c128729e1..0604d084e 100755
--- a/llama_stack/distribution/build_container.sh
+++ b/llama_stack/distribution/build_container.sh
@@ -163,9 +163,6 @@ EOF
 if [ -n "$run_config" ]; then
   # Copy the run config to the build context since it's an absolute path
   cp "$run_config" "$BUILD_CONTEXT_DIR/run.yaml"
-  add_to_container << EOF
-COPY run.yaml $RUN_CONFIG_PATH
-EOF

   # Parse the run.yaml configuration to identify external provider directories
   # If external providers are specified, copy their directory to the container
@@ -173,12 +170,15 @@ EOF
   python_cmd=$(get_python_cmd)
   external_providers_dir=$($python_cmd -c "import yaml; config = yaml.safe_load(open('$run_config')); print(config.get('external_providers_dir') or '')")
   external_providers_dir=$(eval echo "$external_providers_dir")
-  if [ -n "$external_providers_dir" ] && [ -d "$external_providers_dir" ]; then
+  if [ -n "$external_providers_dir" ]; then
+    if [ -d "$external_providers_dir" ]; then
     echo "Copying external providers directory: $external_providers_dir"
     cp -r "$external_providers_dir" "$BUILD_CONTEXT_DIR/providers.d"
     add_to_container << EOF
 COPY providers.d /.llama/providers.d
 EOF
+    fi
+
     # Edit the run.yaml file to change the external_providers_dir to /.llama/providers.d
     if [ "$(uname)" = "Darwin" ]; then
       sed -i.bak -e 's|external_providers_dir:.*|external_providers_dir: /.llama/providers.d|' "$BUILD_CONTEXT_DIR/run.yaml"
@@ -187,6 +187,11 @@ EOF
       sed -i 's|external_providers_dir:.*|external_providers_dir: /.llama/providers.d|' "$BUILD_CONTEXT_DIR/run.yaml"
     fi
   fi
+
+  # Copy run config into docker image
+  add_to_container << EOF
+COPY run.yaml $RUN_CONFIG_PATH
+EOF
 fi

 stack_mount="/app/llama-stack-source"
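
For reference, the `python -c` one-liner in the second hunk is the value the new non-empty/directory-exists split acts on. A minimal standalone sketch of what it does is below, assuming PyYAML is installed; `run_config_path` is a placeholder for the script's `$run_config`:

```python
# Rough standalone equivalent of the build script's `python -c` one-liner:
# read run.yaml and print external_providers_dir, or an empty string if unset.
# Assumes PyYAML is installed; `run_config_path` is a placeholder for $run_config.
import yaml

run_config_path = "run.yaml"

with open(run_config_path) as f:
    config = yaml.safe_load(f)

# `or ""` collapses a missing key or an explicit null to an empty string, so the
# shell's `[ -n "$external_providers_dir" ]` check can skip the whole block.
print(config.get("external_providers_dir") or "")
```

An empty result skips both the copy and the sed rewrite; a non-empty value whose directory does not exist on the build host still gets rewritten to `/.llama/providers.d` in the copied run.yaml, while only an existing directory is actually copied into the image.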