Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-05 10:13:05 +00:00
matrixify

Commit 968c9a8346 (parent e483004d82)
1 changed file with 6 additions and 10 deletions
.github/workflows/integration-tests.yml (vendored): 16 changed lines
@@ -26,6 +26,7 @@ jobs:
         # Listing tests manually since some of them currently fail
         # TODO: generate matrix list from tests/integration when fixed
         test-type: [agents, inference, datasets, inspect, scoring, post_training, providers]
+        stack-config: [ollama, http://localhost:8321]
       fail-fast: false # we want to run all tests regardless of failure
 
     steps:
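The new `stack-config` axis crosses with the existing `test-type` axis, so the matrix now fans out to 7 × 2 = 14 jobs, one per test type for each of the two stack configurations. A minimal sketch of how the resulting strategy block reads (indentation and surrounding keys assumed from the hunk above):

```yaml
# Sketch of the strategy block after this hunk; layout assumed, values taken from the diff.
strategy:
  matrix:
    test-type: [agents, inference, datasets, inspect, scoring, post_training, providers]
    stack-config: [ollama, http://localhost:8321]  # library client vs. a locally served HTTP endpoint
  fail-fast: false  # we want to run all tests regardless of failure
```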
@@ -76,6 +77,7 @@ jobs:
           exit 1
 
       - name: Start Llama Stack server in background
+        if: matrix.stack-config == "http://localhost:8321"
         env:
           INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
         run: |
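With the matrix in place, starting a server only makes sense for the HTTP configuration, so this step gains an `if:` guard and is skipped when `stack-config` is `ollama` (the in-process library client). A sketch of the gated step after this hunk; note that string literals inside GitHub Actions `if:` expressions are normally single-quoted, so the quoting here differs from the double quotes shown in the diff:

```yaml
# Sketch only: the server-start step gated on the HTTP matrix entry.
# The expression literal uses single quotes, the usual GitHub Actions
# syntax; the diff above shows double quotes.
- name: Start Llama Stack server in background
  if: matrix.stack-config == 'http://localhost:8321'
  env:
    INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
  run: |
    nohup uv run llama stack run ./llama_stack/templates/ollama/run.yaml --image-type venv > server.log 2>&1 &
```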
@@ -83,6 +85,7 @@ jobs:
           nohup uv run llama stack run ./llama_stack/templates/ollama/run.yaml --image-type venv > server.log 2>&1 &
 
       - name: Wait for Llama Stack server to be ready
+        if: matrix.stack-config == "http://localhost:8321"
         run: |
           echo "Waiting for Llama Stack server..."
           for i in {1..30}; do
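The readiness wait gets the same guard. The hunk only shows the first lines of the polling loop; the sketch below shows what a 30-iteration readiness poll of this shape could look like, where the health URL, the 1-second sleep, and the failure handling are assumptions for illustration rather than lines taken from this diff:

```yaml
# Hypothetical readiness poll. The /v1/health endpoint, sleep interval,
# and failure handling are assumptions, not part of this commit's diff.
- name: Wait for Llama Stack server to be ready
  if: matrix.stack-config == 'http://localhost:8321'
  run: |
    echo "Waiting for Llama Stack server..."
    for i in {1..30}; do
      if curl -sSf http://localhost:8321/v1/health > /dev/null 2>&1; then
        echo "Llama Stack server is up after ${i} attempts"
        exit 0
      fi
      sleep 1
    done
    echo "Llama Stack server failed to start within 30 seconds"
    cat server.log
    exit 1
```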
@@ -96,18 +99,11 @@ jobs:
           cat server.log
           exit 1
 
-      - name: Run Integration Tests via library client
+      - name: Run Integration Tests
         env:
           INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
         run: |
-          uv run pytest -v tests/integration/${{ matrix.test-type }} --stack-config=ollama \
-            -k "not(builtin_tool_code or safety_with_image or code_interpreter_for)" \
-            --text-model="meta-llama/Llama-3.2-3B-Instruct" \
-            --embedding-model=all-MiniLM-L6-v2
-
-      - name: Run Integration Tests via http client
-        run: |
-          uv run pytest -v tests/integration/${{ matrix.test-type }} --stack-config=http://localhost:8321 \
-            -k "not(builtin_tool_code or safety_with_image or code_interpreter_for)" \
+          uv run pytest -v tests/integration/${{ matrix.test-type }} --stack-config=${{ matrix.stack-config }} \
+            -k "not(builtin_tool or safety_with_image or code_interpreter or test_rag)" \
             --text-model="meta-llama/Llama-3.2-3B-Instruct" \
             --embedding-model=all-MiniLM-L6-v2
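Putting the added and retained lines together, the two client-specific test steps collapse into a single step that reads its target from the matrix, and the `-k` filter broadens from `builtin_tool_code` and `code_interpreter_for` to `builtin_tool` and `code_interpreter` while also excluding `test_rag`. The consolidated step, assembled from the diff above (indentation assumed):

```yaml
# Consolidated test step assembled from the added and retained lines in the hunk above.
- name: Run Integration Tests
  env:
    INFERENCE_MODEL: "meta-llama/Llama-3.2-3B-Instruct"
  run: |
    uv run pytest -v tests/integration/${{ matrix.test-type }} --stack-config=${{ matrix.stack-config }} \
      -k "not(builtin_tool or safety_with_image or code_interpreter or test_rag)" \
      --text-model="meta-llama/Llama-3.2-3B-Instruct" \
      --embedding-model=all-MiniLM-L6-v2
```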