From a5309d6ff1fbc378d27ad8128ea5bbe5370ab78f Mon Sep 17 00:00:00 2001
From: Omer Tuchfeld
Date: Tue, 22 Jul 2025 10:55:09 +0200
Subject: [PATCH] fix(install): explicit docker.io usage

When podman is used and the registry is omitted, podman will prompt the
user. However, we pipe podman's output to /dev/null, so the user never
sees the prompt; the script ends abruptly, which is confusing.

This commit explicitly uses the docker.io registry for the ollama image
and the llama-stack image so that the prompt is avoided.

Signed-off-by: Omer Tuchfeld
---
 scripts/install.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/install.sh b/scripts/install.sh
index dae43df38..b5afe43b8 100755
--- a/scripts/install.sh
+++ b/scripts/install.sh
@@ -15,7 +15,7 @@ set -Eeuo pipefail
 PORT=8321
 OLLAMA_PORT=11434
 MODEL_ALIAS="llama3.2:3b"
-SERVER_IMAGE="llamastack/distribution-ollama:0.2.2"
+SERVER_IMAGE="docker.io/llamastack/distribution-ollama:0.2.2"
 WAIT_TIMEOUT=300
 
 log(){ printf "\e[1;32m%s\e[0m\n" "$*"; }
@@ -165,7 +165,7 @@ log "🦙 Starting Ollama…"
 $ENGINE run -d "${PLATFORM_OPTS[@]}" --name ollama-server \
   --network llama-net \
   -p "${OLLAMA_PORT}:${OLLAMA_PORT}" \
-  ollama/ollama > /dev/null 2>&1
+  docker.io/ollama/ollama > /dev/null 2>&1
 
 if ! wait_for_service "http://localhost:${OLLAMA_PORT}/" "Ollama" "$WAIT_TIMEOUT" "Ollama daemon"; then
   log "❌ Ollama daemon did not become ready in ${WAIT_TIMEOUT}s; dumping container logs:"
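
For context, a minimal sketch of the behavior the commit message describes
(image names are the ones used in the patch; the exact prompt depends on
podman's short-name resolution settings in registries.conf):

  # Unqualified image name: podman may prompt interactively for a registry,
  # but the redirection hides the prompt and the script ends abruptly.
  podman run -d ollama/ollama > /dev/null 2>&1

  # Fully qualified image name: no short-name resolution, no prompt.
  podman run -d docker.io/ollama/ollama > /dev/null 2>&1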