Sixian Yi 2025-01-16 18:02:48 -08:00
parent 3e7496e835
commit ac5b3451b7
2 changed files with 31 additions and 1 deletion

@@ -201,7 +201,9 @@ jobs:
    runs-on: ubuntu-latest
    env:
      TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
      FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
      TAVILY_SEARCH_API_KEY: ${{ secrets.TAVILY_SEARCH_API_KEY }}
    steps:
      - uses: actions/checkout@v4
        with:
@@ -241,4 +243,14 @@ jobs:
          pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Building_AI_Applications.ipynb
          pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Benchmark_Evals.ipynb
      # TODO: add trigger for integration test workflow & docker builds
      - name: Integration tests
        run: |
          pip install -U \
            torch torchvision \
            pytest pytest_asyncio \
            fairscale lm-format-enforcer \
            zmq chardet pypdf \
            pandas sentence_transformers together \
            aiosqlite groq fireworks-ai \
            pytest-asyncio pytest_html
          pytest ./llama_stack/providers/tests/ --config=github_ci_test_config.yaml
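
The new step installs the test dependencies and then runs the provider test suite against a YAML test config passed via --config. As a rough illustration of how such a pytest flag is typically wired up (a hedged sketch only, not the repository's actual conftest.py; the option wiring and fixture name below are assumptions), a conftest could register it like this:

    # conftest.py -- illustrative only; llama_stack's real hooks may differ
    import pytest

    def pytest_addoption(parser):
        # Register the --config flag that the CI step passes on the command line.
        parser.addoption(
            "--config",
            action="store",
            default=None,
            help="Path to a YAML file selecting tests, scenarios, and models",
        )

    @pytest.fixture(scope="session")
    def ci_test_config_path(request):
        # Expose the chosen path to fixtures/tests that want to read it.
        return request.config.getoption("--config")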

@@ -0,0 +1,18 @@
inference:
  tests:
    - inference/test_vision_inference.py::test_vision_chat_completion_streaming
    - inference/test_vision_inference.py::test_vision_chat_completion_non_streaming
    - inference/test_text_inference.py::test_structured_output
    - inference/test_text_inference.py::test_chat_completion_streaming
    - inference/test_text_inference.py::test_chat_completion_non_streaming
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling_streaming

  scenarios:
    - fixture_combo_id: fireworks
    - provider_fixtures:
        inference: together

  inference_models:
    - meta-llama/Llama-3.1-8B-Instruct
    - meta-llama/Llama-3.2-11B-Vision-Instruct
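
The new github_ci_test_config.yaml groups the selected test node ids, provider fixture scenarios, and model ids under an inference section. A minimal sketch of how a harness could consume such a file, assuming PyYAML is available (the loader and helper below are illustrative, not llama_stack's actual config code):

    # read_test_config.py -- hedged sketch, not the project's real loader
    import yaml

    def load_inference_config(path="github_ci_test_config.yaml"):
        # Parse the YAML file and return the "inference" section shown above.
        with open(path) as f:
            cfg = yaml.safe_load(f)
        return cfg.get("inference", {})

    def is_test_selected(cfg, nodeid):
        # Keep a collected test only if its node id appears under "tests".
        return any(nodeid.endswith(t) for t in cfg.get("tests", []))

    if __name__ == "__main__":
        cfg = load_inference_config()
        print(cfg.get("inference_models", []))
        print(is_test_selected(cfg, "inference/test_text_inference.py::test_structured_output"))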