mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 09:53:45 +00:00
Standardize CI workflows to use `release-X.Y.x` branch pattern instead of multiple numeric variants. That's the pattern we are settling on. See https://github.com/llamastack/llama-stack-ops/pull/20 for reference.
84 lines
3.3 KiB
YAML
---
# Composite action: common setup for integration-test jobs.
# Installs dependencies, starts the requested inference provider
# (ollama/vllm, only when recording), builds Llama Stack, and
# configures git identity for workflows that commit recordings.
name: 'Setup Test Environment'
description: 'Common setup steps for integration tests including dependencies, providers, and build'

inputs:
  python-version:
    description: 'Python version to use'
    required: true
  client-version:
    description: 'Client version (latest or published)'
    required: true
  setup:
    description: 'Setup to configure (ollama, vllm, gpt, etc.)'
    required: false
    default: 'ollama'
  suite:
    description: 'Test suite to use: base, responses, vision, etc.'
    required: false
    default: ''
  inference-mode:
    description: 'Inference mode (record or replay)'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Install dependencies
      uses: ./.github/actions/setup-runner
      with:
        python-version: ${{ inputs.python-version }}
        client-version: ${{ inputs.client-version }}

    # Provider servers are only needed when recording new inference
    # traces; replay mode runs against stored recordings.
    - name: Setup ollama
      if: ${{ (inputs.setup == 'ollama' || inputs.setup == 'ollama-vision') && inputs.inference-mode == 'record' }}
      uses: ./.github/actions/setup-ollama
      with:
        suite: ${{ inputs.suite }}

    - name: Setup vllm
      if: ${{ inputs.setup == 'vllm' && inputs.inference-mode == 'record' }}
      uses: ./.github/actions/setup-vllm

    - name: Build Llama Stack
      shell: bash
      run: |
        # Install llama-stack-client-python based on the client-version input
        if [ "${{ inputs.client-version }}" = "latest" ]; then
          # Check if PR is targeting a release branch
          TARGET_BRANCH="${{ github.base_ref }}"

          # Release branches follow the `release-X.Y.x` pattern; a matching
          # branch must exist in the client repo so both move in lockstep.
          if [[ "$TARGET_BRANCH" =~ ^release-[0-9]+\.[0-9]+\.x$ ]]; then
            echo "PR targets release branch: $TARGET_BRANCH"
            echo "Checking if matching branch exists in llama-stack-client-python..."

            # Check if the branch exists in the client repo
            if git ls-remote --exit-code --heads https://github.com/llamastack/llama-stack-client-python.git "$TARGET_BRANCH" > /dev/null 2>&1; then
              echo "Installing llama-stack-client-python from matching branch: $TARGET_BRANCH"
              export LLAMA_STACK_CLIENT_DIR=git+https://github.com/llamastack/llama-stack-client-python.git@$TARGET_BRANCH
            else
              echo "::error::Branch $TARGET_BRANCH not found in llama-stack-client-python repository"
              echo "::error::Please create the matching release branch in llama-stack-client-python before testing"
              exit 1
            fi
          else
            echo "Installing latest llama-stack-client-python from main branch"
            export LLAMA_STACK_CLIENT_DIR=git+https://github.com/llamastack/llama-stack-client-python.git@main
          fi
        elif [ "${{ inputs.client-version }}" = "published" ]; then
          echo "Installing published llama-stack-client-python from PyPI"
          # Unsetting the dir makes the build fall back to the PyPI release.
          unset LLAMA_STACK_CLIENT_DIR
        else
          echo "Invalid client-version: ${{ inputs.client-version }}"
          exit 1
        fi

        echo "Building Llama Stack"

        LLAMA_STACK_DIR=. \
        uv run --no-sync llama stack list-deps ci-tests | xargs -L1 uv pip install

    # Identity used by later steps that commit recorded inference traces.
    - name: Configure git for commits
      shell: bash
      run: |
        git config --local user.email "github-actions[bot]@users.noreply.github.com"
        git config --local user.name "github-actions[bot]"