Merge pull request #5655 from BerriAI/litellm_testing_clean_up

[Fix Ci/cd] Separate testing pipeline for litellm router
This commit is contained in:
Ishaan Jaff 2024-09-12 11:05:26 -07:00 committed by GitHub
commit ead1e0c708
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 73 additions and 2 deletions

View file

@ -105,7 +105,7 @@ jobs:
command: |
pwd
ls
python -m pytest -vv litellm/tests/ -x --junitxml=test-results/junit.xml --durations=5 -k "not test_python_38.py"
python -m pytest -vv litellm/tests/ -x --junitxml=test-results/junit.xml --durations=5 -k "not test_python_38.py and not router and not assistants"
no_output_timeout: 120m
# Store test results
@ -149,6 +149,60 @@ jobs:
# Store test results
- store_test_results:
path: test-results
# CI job split out from the main test run (which now excludes "router" via -k):
# executes only the tests whose names match the "router" keyword.
litellm_router_testing: # Runs all tests with the "router" keyword
docker:
- image: cimg/python:3.11
working_directory: ~/project
steps:
- checkout
# Install project requirements plus pinned test-only packages
# (pytest, respx, pytest-retry, pytest-asyncio).
- run:
name: Install Dependencies
command: |
python -m pip install --upgrade pip
python -m pip install -r requirements.txt
pip install "pytest==7.3.1"
pip install "respx==0.21.1"
pip install "pytest-retry==1.6.3"
pip install "pytest-asyncio==0.21.1"
# Run pytest and generate JUnit XML report
- run:
name: Run tests
command: |
pwd
ls
python -m pytest litellm/tests/ -vv -k "router" -x -s -v --junitxml=test-results/junit.xml --durations=5
no_output_timeout: 120m
# Store test results
- store_test_results:
path: test-results
# CI job split out from the main test run (which now excludes "assistants" via -k):
# executes only the tests whose names match the "assistants" keyword.
# NOTE(review): this job is a near-duplicate of litellm_router_testing differing
# only in the -k filter — consider a CircleCI parameterized job to deduplicate.
litellm_assistants_api_testing: # Runs all tests with the "assistants" keyword
docker:
- image: cimg/python:3.11
working_directory: ~/project
steps:
- checkout
# Install project requirements plus pinned test-only packages
# (pytest, respx, pytest-retry, pytest-asyncio).
- run:
name: Install Dependencies
command: |
python -m pip install --upgrade pip
python -m pip install -r requirements.txt
pip install "pytest==7.3.1"
pip install "respx==0.21.1"
pip install "pytest-retry==1.6.3"
pip install "pytest-asyncio==0.21.1"
# Run pytest and generate JUnit XML report
- run:
name: Run tests
command: |
pwd
ls
python -m pytest litellm/tests/ -vv -k "assistants" -x -s -v --junitxml=test-results/junit.xml --durations=5
no_output_timeout: 120m
# Store test results
- store_test_results:
path: test-results
load_testing:
docker:
- image: cimg/python:3.11
@ -604,6 +658,18 @@ workflows:
only:
- main
- /litellm_.*/
- litellm_assistants_api_testing:
filters:
branches:
only:
- main
- /litellm_.*/
- litellm_router_testing:
filters:
branches:
only:
- main
- /litellm_.*/
- ui_endpoint_testing:
filters:
branches:
@ -645,6 +711,10 @@ workflows:
- local_testing
- build_and_test
- load_testing
- litellm_router_testing
- litellm_assistants_api_testing
- ui_endpoint_testing
- installing_litellm_on_python
- proxy_log_to_otel_tests
- proxy_pass_through_endpoint_tests
filters:

View file

@ -24,7 +24,7 @@ from litellm import RateLimitError, Timeout, completion, completion_cost, embedd
from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler
from litellm.llms.prompt_templates.factory import anthropic_messages_pt
# litellm.num_retries=3
# litellm.num_retries =3
litellm.cache = None
litellm.success_callback = []
user_message = "Write a short poem about the sky"

View file

@ -1431,6 +1431,7 @@ async def test_completion_replicate_llama3_streaming(sync_mode):
],
)
@pytest.mark.asyncio
@pytest.mark.flaky(retries=3, delay=1)
async def test_bedrock_httpx_streaming(sync_mode, model, region):
try:
litellm.set_verbose = True