handle flaky pytests

Ishaan Jaff 2024-08-27 22:43:54 -07:00
parent 640e5d0dc9
commit bcf7f3e437
5 changed files with 13 additions and 2 deletions

@@ -31,6 +31,7 @@ jobs:
 python -m pip install --upgrade pip
 python -m pip install -r .circleci/requirements.txt
 pip install "pytest==7.3.1"
+pip install "pytest-retry==1.6.3"
 pip install "pytest-asyncio==0.21.1"
 pip install mypy
 pip install "google-generativeai==0.3.2"
@@ -171,6 +172,7 @@ jobs:
 python -m pip install --upgrade pip
 python -m pip install -r .circleci/requirements.txt
 pip install "pytest==7.3.1"
+pip install "pytest-retry==1.6.3"
 pip install "pytest-mock==3.12.0"
 pip install "pytest-asyncio==0.21.1"
 pip install mypy
@@ -282,6 +284,7 @@ jobs:
 name: Install Dependencies
 command: |
 pip install "pytest==7.3.1"
+pip install "pytest-retry==1.6.3"
 pip install "pytest-asyncio==0.21.1"
 pip install aiohttp
 pip install "openai==1.40.0"

@@ -299,6 +299,7 @@ async def _gather_deploy(all_deploys):
 @pytest.mark.parametrize(
     "ans_rpm", [1, 5]
 )  # 1 should produce nothing, 10 should select first
+@pytest.mark.flaky(retries=3, delay=1)
 def test_get_available_endpoints_tpm_rpm_check_async(ans_rpm):
     """
     Pass in list of 2 valid models

@@ -95,6 +95,7 @@ def test_create_batch(provider):
 @pytest.mark.parametrize("provider", ["openai", "azure"])
 @pytest.mark.asyncio()
+@pytest.mark.flaky(retries=3, delay=1)
 async def test_async_create_batch(provider):
     """
     1. Create File for Batch completion

@@ -1,7 +1,11 @@
 #### What this tests ####
 # This tests caching on the router
-import sys, os, time
-import traceback, asyncio
+import asyncio
+import os
+import sys
+import time
+import traceback
+
 import pytest
 
 sys.path.insert(
@@ -71,6 +75,7 @@ def test_router_sync_caching_with_ssl_url():
 @pytest.mark.asyncio
+@pytest.mark.flaky(retries=3, delay=1)
 async def test_acompletion_caching_on_router():
     # tests acompletion + caching on router
     try:

@@ -325,6 +325,7 @@ async def test_chat_completion_ratelimit():
 @pytest.mark.asyncio
+@pytest.mark.flaky(retries=3, delay=1)
 async def test_chat_completion_different_deployments():
     """
     - call model group with 2 deployments
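
The marker stacks with the marks these tests already carry (pytest.mark.parametrize, pytest.mark.asyncio), as the hunks above show. A self-contained sketch of that combination, using hypothetical names and a random stand-in for the real router and batch calls; it assumes pytest-asyncio and pytest-retry are installed as in the CI config above:

import asyncio
import random

import pytest


@pytest.mark.parametrize("provider", ["openai", "azure"])
@pytest.mark.asyncio()
@pytest.mark.flaky(retries=3, delay=1)
async def test_async_call_is_retried(provider):
    # Stand-in for a network-bound call that can fail transiently;
    # each parametrized case is retried independently on failure.
    await asyncio.sleep(0)
    assert random.random() < 0.9, f"transient failure for {provider}"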