Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-12 12:06:04 +00:00.
Moved package code from llama_stack/ to src/llama_stack/ following Python packaging best practices. Updated pyproject.toml, MANIFEST.in, and tool configurations accordingly. Public API and import paths remain unchanged. Developers will need to reinstall in editable mode after pulling this change. Also updated paths in pre-commit config, scripts, and GitHub workflows.
92 lines · 3 KiB · YAML
name: Test External API and Providers

run-name: Test the External API and Provider mechanisms

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
    paths:
      - 'src/llama_stack/**'
      # Exclude the UI subtree from triggering this workflow.
      # NOTE: path updated to match the src/ layout above — a bare
      # '!llama_stack/ui/**' would no longer negate anything.
      - '!src/llama_stack/ui/**'
      - 'tests/integration/**'
      - 'uv.lock'
      - 'pyproject.toml'
      - 'requirements.txt'
      - 'tests/external/*'
      - '.github/workflows/test-external.yml' # This workflow

jobs:
  test-external:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image-type: [venv]
        # We don't do container yet, it's tricky to install a package from the host into the
        # container and point 'uv pip install' to the correct path...
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install dependencies
        uses: ./.github/actions/setup-runner

      - name: Create API configuration
        run: |
          mkdir -p /home/runner/.llama/apis.d
          cp tests/external/weather.yaml /home/runner/.llama/apis.d/weather.yaml

      - name: Create provider configuration
        run: |
          mkdir -p /home/runner/.llama/providers.d/remote/weather
          cp tests/external/kaze.yaml /home/runner/.llama/providers.d/remote/weather/kaze.yaml

      - name: Print distro dependencies
        run: |
          uv run --no-sync llama stack list-deps tests/external/build.yaml

      - name: Build distro from config file
        run: |
          uv venv ci-test
          source ci-test/bin/activate
          uv pip install -e .
          LLAMA_STACK_LOGGING=all=CRITICAL llama stack list-deps tests/external/build.yaml | xargs -L1 uv pip install

      - name: Start Llama Stack server in background
        # FIX: the comparison must live INSIDE the ${{ }} expression.
        # The previous form `${{ matrix.image-type }} == 'venv'` expanded to the
        # literal string "venv == 'venv'", which is non-empty and therefore
        # always truthy — the condition never actually gated the step.
        if: ${{ matrix.image-type == 'venv' }}
        env:
          INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
          LLAMA_STACK_LOG_FILE: "server.log"
        run: |
          # Use the virtual environment created by the build step (name comes from build config)
          source ci-test/bin/activate
          uv pip list
          nohup llama stack run tests/external/run-byoa.yaml > server.log 2>&1 &

      - name: Wait for Llama Stack server to be ready
        run: |
          echo "Waiting for Llama Stack server..."
          for i in {1..30}; do
            if curl -sSf http://localhost:8321/v1/health | grep -q "OK"; then
              echo "Llama Stack server is up!"
              exit 0
            fi
            sleep 1
          done
          echo "Llama Stack server failed to start"
          cat server.log
          exit 1

      - name: Test external API
        run: |
          curl -sSf http://localhost:8321/v1/weather/locations

      - name: Upload all logs to artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: logs-${{ github.run_id }}-${{ github.run_attempt }}-external-test
          path: |
            *.log
          retention-days: 1