# What does this PR do?

**Main Thing**
- Add a simple test step before publishing the docker image in the workflow

**Side Fix**
- The Docker push action has recently been failing due to an extra prefix that was introduced. E.g. see: https://github.com/meta-llama/llama-stack/pull/802#issuecomment-2599507062

cc @terrytangyuan

## Test Plan

1. Release a TestPyPI version on this code: 0.0.63.dev51206766
```
# 1. build docker image
TEST_PYPI_VERSION=0.0.63.dev51206766 llama stack build --template fireworks

# 2. test the docker image
cd distributions/fireworks && docker compose up
```
4. Test the full build + test docker flow using TestPyPI from (1): 1284218494
<img width="1049" alt="image" src="https://github.com/user-attachments/assets/c025893d-5ce2-48ff-aa90-de00e105ee09" /> ## Sources Please link relevant resources if necessary. ## Before submitting - [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case). - [ ] Ran pre-commit to handle lint / formatting issues. - [ ] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section? - [ ] Updated relevant documentation. - [ ] Wrote necessary unit or integration tests.
name: Docker Build and Publish

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'TestPyPI or PyPI version to build (e.g., 0.0.63.dev20250114)'
        required: true
        type: string

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    env:
      TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
      FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
      TAVILY_SEARCH_API_KEY: ${{ secrets.TAVILY_SEARCH_API_KEY }}
    permissions:
      contents: read
      packages: write
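
    # Provider and search API keys are exported job-wide so the distribution
    # container brought up later in the job can reach its backends.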
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
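
      # On push events, pin a known-good TestPyPI dev version; otherwise use
      # the version supplied via the workflow_dispatch input.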
      - name: Set version
        id: version
        run: |
          if [ "${{ github.event_name }}" = "push" ]; then
            echo "VERSION=0.0.63.dev51206766" >> $GITHUB_OUTPUT
          else
            echo "VERSION=${{ inputs.version }}" >> $GITHUB_OUTPUT
          fi
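
      # Probe the TestPyPI/PyPI project pages: an HTTP 200 means the requested
      # version has been published there.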
      - name: Check package version availability
        run: |
          # Function to check if version exists in a repository
          check_version() {
            local repo=$1
            local status_code=$(curl -s -o /dev/null -w "%{http_code}" "https://$repo.org/project/llama-stack/${{ steps.version.outputs.version }}")
            [ "$status_code" -eq 200 ]
          }

          # Check TestPyPI first, then PyPI
          if check_version "test.pypi"; then
            echo "Version ${{ steps.version.outputs.version }} found in TestPyPI"
            echo "PYPI_SOURCE=testpypi" >> $GITHUB_ENV
          elif check_version "pypi"; then
            echo "Version ${{ steps.version.outputs.version }} found in PyPI"
            echo "PYPI_SOURCE=pypi" >> $GITHUB_ENV
          else
            echo "Error: Version ${{ steps.version.outputs.version }} not found in either TestPyPI or PyPI"
            exit 1
          fi
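
      # TestPyPI hosts only the llama-stack package itself, so dependencies
      # are pulled from regular PyPI via --extra-index-url.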
      - name: Install llama-stack
        run: |
          if [ "${{ github.event_name }}" = "push" ]; then
            pip install -e .
          else
            if [ "$PYPI_SOURCE" = "testpypi" ]; then
              pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple llama-stack==${{ steps.version.outputs.version }}
            else
              pip install llama-stack==${{ steps.version.outputs.version }}
            fi
          fi
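
      # TEST_PYPI_VERSION vs. PYPI_VERSION tells `llama stack build` which
      # index to install llama-stack from inside the image.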
      - name: Build docker image
        run: |
          TEMPLATES=("ollama" "bedrock" "remote-vllm" "fireworks" "together" "tgi" "meta-reference-gpu")
          for template in "${TEMPLATES[@]}"; do
            if [ "$PYPI_SOURCE" = "testpypi" ]; then
              TEST_PYPI_VERSION=${{ steps.version.outputs.version }} llama stack build --template $template --image-type container
            else
              PYPI_VERSION=${{ steps.version.outputs.version }} llama stack build --template $template --image-type container
            fi
          done

      - name: List docker images
        run: |
          docker images

      # TODO (xiyan): make the following 2 steps into a matrix and test all templates other than fireworks
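      # Rewrite the compose file to point at the locally built image tag, start
      # the distribution, then poll /v1/version for up to 300 seconds.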
      - name: Start up built docker image
        run: |
          cd distributions/fireworks
          if [ "$PYPI_SOURCE" = "testpypi" ]; then
            sed -i 's|image: llamastack/distribution-fireworks|image: distribution-fireworks:test-${{ steps.version.outputs.version }}|' ./compose.yaml
          else
            sed -i 's|image: llamastack/distribution-fireworks|image: distribution-fireworks:${{ steps.version.outputs.version }}|' ./compose.yaml
          fi
          docker compose up -d
          cd ..

          # Wait for the container to start
          timeout=300
          while ! curl -s -f http://localhost:8321/v1/version > /dev/null && [ $timeout -gt 0 ]; do
            echo "Waiting for endpoint to be available..."
            sleep 5
            timeout=$((timeout - 5))
          done

          if [ $timeout -le 0 ]; then
            echo "Timeout waiting for endpoint to become available"
            exit 1
          fi
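
      # Minimal smoke test: the server should return its model list.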
      - name: Run simple models list test on docker server
        run: |
          curl http://localhost:8321/v1/models

      # TODO (xiyan): figure out why client cannot find server but curl works
      # - name: Run pytest on docker server
      #   run: |
      #     pip install pytest pytest-md-report
      #     export LLAMA_STACK_BASE_URL="http://localhost:8321"
      #     LLAMA_STACK_BASE_URL="http://localhost:8321" pytest -v tests/client-sdk/inference/test_inference.py --md-report --md-report-verbose=1
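
      # Side fix from this PR: images are built without the llamastack/ org
      # prefix, so re-tag them before pushing to Docker Hub.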
      - name: Push to dockerhub
        run: |
          TEMPLATES=("ollama" "bedrock" "remote-vllm" "fireworks" "together" "tgi" "meta-reference-gpu")
          for template in "${TEMPLATES[@]}"; do
            if [ "$PYPI_SOURCE" = "testpypi" ]; then
              docker tag distribution-$template:test-${{ steps.version.outputs.version }} llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
              docker push llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
            else
              docker tag distribution-$template:${{ steps.version.outputs.version }} llamastack/distribution-$template:${{ steps.version.outputs.version }}
              docker push llamastack/distribution-$template:${{ steps.version.outputs.version }}
            fi
          done
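```

With the workflow in place, a manual run can be dispatched against any published version; a minimal sketch using the GitHub CLI (assuming `gh` is installed and authenticated for this repo):

```sh
# Dispatch the workflow above with a specific TestPyPI/PyPI version to build and test.
gh workflow run "Docker Build and Publish" -f version=0.0.63.dev51206766
```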