test: add retry on flaky test

This commit is contained in:
Krrish Dholakia 2024-12-02 21:06:13 -08:00
parent 2fda90742f
commit aaa8c7caf0

View file

@ -138,6 +138,7 @@ class BaseLLMChatTest(ABC):
    except litellm.InternalServerError:
        pytest.skip("Model is overloaded")
@pytest.mark.flaky(retries=6, delay=1)
    def test_json_response_format_stream(self):
        """
        Test that the JSON response format with streaming is supported by the LLM API