From 30117dea227cee088d3748769455a75928dc9dd5 Mon Sep 17 00:00:00 2001 From: slekkala1 Date: Thu, 28 Aug 2025 13:20:36 -0700 Subject: [PATCH] fix: docker failing to start container [fireworks-ai] (#3267) # What does this PR do? https://github.com/llamastack/llama-stack-ops/actions/runs/17253649880 Fixes the issue with openai package incompatibility introduced through the new dependency of fireworks-ai==0.19.18->reward-kit by pinning to an older fireworks-ai version that doesn't pull in reward-kit ## Test Plan Tested locally with the following commands to start a container 1. Build container `llama stack build --distro starter --image-type container` 2. Start container `docker run -d -p 8321:8321 --name llama-stack-test distribution-starter:0.2.19` 3. Check health http://localhost:8321/v1/health The above steps fail without the fix --- llama_stack/providers/registry/inference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama_stack/providers/registry/inference.py b/llama_stack/providers/registry/inference.py index 82b771a28..6264de7c7 100644 --- a/llama_stack/providers/registry/inference.py +++ b/llama_stack/providers/registry/inference.py @@ -116,7 +116,7 @@ def available_providers() -> list[ProviderSpec]: adapter=AdapterSpec( adapter_type="fireworks", pip_packages=[ - "fireworks-ai", + "fireworks-ai<=0.18.0", ], module="llama_stack.providers.remote.inference.fireworks", config_class="llama_stack.providers.remote.inference.fireworks.FireworksImplConfig",