# .github/workflows/test-external.yml

name: Test External API and Providers
run-name: Test the External API and Provider mechanisms

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
    paths:
      - 'src/llama_stack/**'
      - '!src/llama_stack_ui/**'
      - 'tests/integration/**'
      - 'uv.lock'
      - 'pyproject.toml'
      - 'requirements.txt'
      - 'tests/external/*'
      - '.github/workflows/test-external.yml' # This workflow

jobs:
  test-external:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image-type: [venv]
        # We don't do container yet, it's tricky to install a package from the host into the
        # container and point 'uv pip install' to the correct path...
    steps:
      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Install dependencies
        uses: ./.github/actions/setup-runner
      - name: Create API configuration
        run: |
          mkdir -p /home/runner/.llama/apis.d
          cp tests/external/weather.yaml /home/runner/.llama/apis.d/weather.yaml
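      # The spec copied above is what registers the external "weather" API with the
      # stack at startup. As an illustrative sketch only (these field names are
      # assumptions, not the actual contents of tests/external/weather.yaml), such a
      # spec names the API and the Python module implementing its protocol:
      #
      #   name: weather
      #   module: llama_stack_api_weather
      #   pip_packages: []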
      - name: Create provider configuration
        run: |
          mkdir -p /home/runner/.llama/providers.d/remote/weather
          cp tests/external/kaze.yaml /home/runner/.llama/providers.d/remote/weather/kaze.yaml
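      # providers.d/remote/weather is where the stack looks for remote providers of
      # the external "weather" API; kaze.yaml registers the "kaze" provider there.
      # A hedged sketch of what such a provider spec might contain (field names are
      # assumptions, not the real tests/external/kaze.yaml):
      #
      #   adapter_type: kaze
      #   module: llama_stack_provider_kaze
      #   pip_packages: [llama-stack-provider-kaze]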
      - name: Print distro dependencies
        run: |
          uv run --no-sync llama stack list-deps tests/external/config.yaml
      - name: Build distro from config file
        run: |
          uv venv ci-test
          source ci-test/bin/activate
          uv pip install -e .
          LLAMA_STACK_LOGGING=all=CRITICAL llama stack list-deps tests/external/config.yaml | xargs -L1 uv pip install
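      # 'llama stack list-deps' prints the distro's dependency specifiers, one per
      # line, so 'xargs -L1 uv pip install' installs each of them into the ci-test
      # virtual environment created above.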
      - name: Start Llama Stack server in background
        if: ${{ matrix.image-type == 'venv' }}
        env:
          INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
          LLAMA_STACK_LOG_FILE: "server.log"
        run: |
          # Use the virtual environment created by the build step (name comes from build config)
          source ci-test/bin/activate
          uv pip list
          nohup llama stack run tests/external/config.yaml > server.log 2>&1 &
      - name: Wait for Llama Stack server to be ready
        run: |
          echo "Waiting for Llama Stack server..."
          for i in {1..30}; do
            if curl -sSf http://localhost:8321/v1/health | grep -q "OK"; then
              echo "Llama Stack server is up!"
              exit 0
            fi
            sleep 1
          done
          echo "Llama Stack server failed to start"
          cat server.log
          exit 1
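      # /v1/health is the stack's health endpoint; the loop above gives the server up
      # to ~30 seconds to respond before failing the job and dumping server.log.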
      - name: Test external API
        run: |
          curl -sSf http://localhost:8321/v1/weather/locations
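      # /v1/weather/locations is the route expected to be contributed by the external
      # weather API and served by the kaze provider registered earlier; 'curl -sSf'
      # fails the step if the server returns an HTTP error.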
      - name: Upload all logs to artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: logs-${{ github.run_id }}-${{ github.run_attempt }}-external-test
          path: |
            *.log
          retention-days: 1