Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
test: add retry on flaky test
parent 2fda90742f
commit aaa8c7caf0
1 changed file with 1 addition and 0 deletions
@@ -138,6 +138,7 @@ class BaseLLMChatTest(ABC):
         except litellm.InternalServerError:
             pytest.skip("Model is overloaded")
 
+    @pytest.mark.flaky(retries=6, delay=1)
     def test_json_response_format_stream(self):
         """
         Test that the JSON response format with streaming is supported by the LLM API
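
The added @pytest.mark.flaky(retries=6, delay=1) marker reruns the decorated test on failure, up to 6 extra attempts with a 1-second pause between them, so a transient "Model is overloaded" error no longer fails the run outright. A minimal sketch of that behavior is below; it assumes the retries/delay keywords come from the pytest-retry plugin (which plugin the repo actually installs is an assumption here), and the test function and simulated failure are illustrative only, not part of the repo:

import random

import pytest


@pytest.mark.flaky(retries=6, delay=1)  # rerun on failure, up to 6 times, 1s apart
def test_sometimes_overloaded_backend():
    # Simulated flaky dependency: fails roughly half the time, so the
    # retry marker gives the test several chances to pass before it is
    # reported as a failure.
    if random.random() < 0.5:
        raise RuntimeError("Model is overloaded")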