Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-01 16:24:44 +00:00
Dynamically change provider in tests
This commit is contained in:
parent 4136accf48
commit 496879795e
1 changed file with 3 additions and 14 deletions
@@ -41,11 +41,6 @@ on:
         required: true
         default: "meta-reference"
-
-      api_key:
-        description: 'Provider API key'
-        required: false
-        default: "---"
 
 env:
   # Path to model checkpoints within EFS volume
   MODEL_CHECKPOINT_DIR: "/data/llama/Llama3.2-3B-Instruct"
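For context, a workflow_dispatch run that exercises these inputs can be started from the GitHub CLI. A minimal sketch, assuming a hypothetical workflow file name (the changed file's name is not shown on this page):

# Hypothetical file name; substitute the actual workflow file from this commit.
gh workflow run provider-tests.yml \
  --ref main \
  -f provider_id=meta-reference \
  -f model_ids=Llama3.2-3B-Instruct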
@@ -57,13 +52,7 @@ env:
   MODEL_IDS: "${{ inputs.model_ids || 'Llama3.2-3B-Instruct' }}"
-
-  # ID used for each test's provider config
-  #PROVIDER_ID: "${{ inputs.provider_id || 'meta-reference' }}"
-
-  # Defined dynamically when each test is run below
-  #PROVIDER_CONFIG: ""
-
   # (Unused) API key that can be manually defined for workflow dispatch
   API_KEY: "${{ inputs.api_key || '' }}"
+  PROVIDER_ID: "${{ inputs.provider_id || 'meta-reference' }}"
 
   # Defines which directories in TESTS_PATH to exclude from the test loop
   EXCLUDED_DIRS: "__pycache__"
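With PROVIDER_ID now sourced from the provider_id input (falling back to 'meta-reference'), the marker expression used in the test step further below becomes provider-driven. A minimal sketch for previewing that selection locally, assuming TESTS_PATH is set as in this env block and the suite defines provider/model markers matching the expression:

# Preview which tests the marker expression would select, without running them.
# Assumption: TESTS_PATH points at the workflow's test directory and markers
# such as meta_reference and llama_3b are defined in the suite.
export PROVIDER_ID=meta_reference
pytest --collect-only -q "${TESTS_PATH}" -m "${PROVIDER_ID} and llama_3b"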
@@ -78,7 +67,7 @@ jobs:
       pull-requests: write
     defaults:
       run:
-        shell: bash # default shell to run all steps for a given job.
+        shell: bash
     runs-on: ${{ inputs.runner != '' && inputs.runner || 'llama-stack-gha-runner-gpu' }}
     if: always()
     steps:
@@ -236,7 +225,7 @@
           for file in "$dir"/test_*.py; do
             test_name=$(basename "$file")
             new_file="result-${dir_name}-${test_name}.xml"
-            if torchrun $(which pytest) -s -v ${TESTS_PATH}/${dir_name}/${test_name} -m "meta_reference and llama_3b" \
+            if torchrun $(which pytest) -s -v ${TESTS_PATH}/${dir_name}/${test_name} -m "${PROVIDER_ID} and llama_3b" \
               --junitxml="${{ github.workspace }}/${new_file}"; then
               echo "Test passed: $test_name"
             else
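The only change in this step is swapping the hardcoded meta_reference marker for ${PROVIDER_ID}; the surrounding loop still writes one JUnit XML report per test file and keeps going after failures. A rough local equivalent of that loop, with illustrative variable defaults that are not taken from the workflow:

# Illustrative reproduction of the per-file test loop; not the workflow's exact script.
export PROVIDER_ID="${PROVIDER_ID:-meta_reference}"
for dir in "${TESTS_PATH}"/*/; do
  dir_name=$(basename "$dir")
  [ "$dir_name" = "__pycache__" ] && continue      # mirrors EXCLUDED_DIRS
  for file in "$dir"test_*.py; do
    [ -e "$file" ] || continue                     # skip directories with no test files
    test_name=$(basename "$file")
    new_file="result-${dir_name}-${test_name}.xml"
    if torchrun "$(which pytest)" -s -v "$file" \
        -m "${PROVIDER_ID} and llama_3b" \
        --junitxml="${new_file}"; then
      echo "Test passed: $test_name"
    else
      echo "Test failed: $test_name"
    fi
  done
done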