diff --git a/.github/workflows/publish-to-docker.yml b/.github/workflows/publish-to-docker.yml
index dc1fa08fc..77321fae7 100644
--- a/.github/workflows/publish-to-docker.yml
+++ b/.github/workflows/publish-to-docker.yml
@@ -91,9 +91,15 @@ jobs:
         run: |
           docker images
 
+      # TODO (xiyan): make this into a matrix and test all templates other than fireworks
       - name: Start up built docker image
         run: |
           cd distributions/fireworks
+          if [ "$PYPI_SOURCE" = "testpypi" ]; then
+            sed -i 's|image: llamastack/distribution-fireworks|image: llamastack/distribution-fireworks:test-${{ steps.version.outputs.version }}|' ./compose.yaml
+          else
+            sed -i 's|image: llamastack/distribution-fireworks|image: llamastack/distribution-fireworks:${{ steps.version.outputs.version }}|' ./compose.yaml
+          fi
           docker compose up -d
           cd ..
           # Wait for the container to start
@@ -113,22 +119,22 @@ jobs:
         run: |
           curl http://localhost:8321/v1/models
 
-      # TODO: figure out why client cannot find server but curl works
-      - name: Run pytest on docker server
-        run: |
-          pip install pytest pytest-md-report
-          export LLAMA_STACK_BASE_URL="http://localhost:8321"
-          LLAMA_STACK_BASE_URL="http://localhost:8321" pytest -v tests/client-sdk/inference/test_inference.py --md-report --md-report-verbose=1
-
-      # - name: Push to dockerhub
+      # TODO (xiyan): figure out why client cannot find server but curl works
+      # - name: Run pytest on docker server
       #   run: |
-      #     TEMPLATES=("ollama" "bedrock" "remote-vllm" "fireworks" "together" "tgi" "meta-reference-gpu")
-      #     for template in "${TEMPLATES[@]}"; do
-      #       if [ "$PYPI_SOURCE" = "testpypi" ]; then
-      #         docker tag distribution-$template:test-${{ steps.version.outputs.version }} llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
-      #         docker push llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
-      #       else
-      #         docker tag distribution-$template:${{ steps.version.outputs.version }} llamastack/distribution-$template:${{ steps.version.outputs.version }}
-      #         docker push llamastack/distribution-$template:${{ steps.version.outputs.version }}
-      #       fi
-      #     done
+      #     pip install pytest pytest-md-report
+      #     export LLAMA_STACK_BASE_URL="http://localhost:8321"
+      #     LLAMA_STACK_BASE_URL="http://localhost:8321" pytest -v tests/client-sdk/inference/test_inference.py --md-report --md-report-verbose=1
+
+      - name: Push to dockerhub
+        run: |
+          TEMPLATES=("ollama" "bedrock" "remote-vllm" "fireworks" "together" "tgi" "meta-reference-gpu")
+          for template in "${TEMPLATES[@]}"; do
+            if [ "$PYPI_SOURCE" = "testpypi" ]; then
+              docker tag distribution-$template:test-${{ steps.version.outputs.version }} llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
+              docker push llamastack/distribution-$template:test-${{ steps.version.outputs.version }}
+            else
+              docker tag distribution-$template:${{ steps.version.outputs.version }} llamastack/distribution-$template:${{ steps.version.outputs.version }}
+              docker push llamastack/distribution-$template:${{ steps.version.outputs.version }}
+            fi
+          done
diff --git a/distributions/fireworks/compose.yaml b/distributions/fireworks/compose.yaml
index a8645d340..84b8491e4 100644
--- a/distributions/fireworks/compose.yaml
+++ b/distributions/fireworks/compose.yaml
@@ -1,6 +1,6 @@
 services:
   llamastack:
-    image: distribution-fireworks:test-0.0.63.dev51206766
+    image: llamastack/distribution-fireworks
     ports:
       - "8321:8321"
     environment:
diff --git a/distributions/together/compose.yaml b/distributions/together/compose.yaml
index 354a0bd6e..f66ee69f9 100644
--- a/distributions/together/compose.yaml
+++ b/distributions/together/compose.yaml
@@ -1,6 +1,6 @@
 services:
   llamastack:
-    image: distribution-together:test-0.0.63.dev51206766
+    image: llamastack/distribution-together
     ports:
       - "8321:8321"
     environment: