Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 11:43:54 +00:00
handle predibase failing streaming tests
parent 1915adfc2d
commit a8ef88039a

1 changed file with 5 additions and 0 deletions
@@ -1495,6 +1495,11 @@ async def test_parallel_streaming_requests(sync_mode, model):
         except RateLimitError:
             pass
+        except litellm.ServiceUnavailableError as e:
+            if model == "predibase/llama-3-8b-instruct":
+                pass
+            else:
+                pytest.fail(f"Service Unavailable Error got {str(e)}")
         except litellm.InternalServerError as e:
             if "predibase" in str(e).lower():
                 # only skip internal server error from predibase - their endpoint seems quite unstable
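
For context, here is a minimal, self-contained sketch of the pattern this commit applies: rate limits are swallowed, a 503 is tolerated only for the known-flaky predibase model, and any other provider's 503 fails the test. The helper name run_streaming_call and the message payload are illustrative stand-ins, not the actual test body from the changed file.

    import litellm
    import pytest

    async def run_streaming_call(model: str) -> None:
        # Hypothetical helper mirroring the test's exception handling.
        try:
            response = await litellm.acompletion(
                model=model,
                messages=[{"role": "user", "content": "hello"}],
                stream=True,
            )
            async for _chunk in response:
                pass  # drain the stream; chunk contents are irrelevant here
        except litellm.RateLimitError:
            # Expected under parallel load; never fail the test for this.
            pass
        except litellm.ServiceUnavailableError as e:
            if model == "predibase/llama-3-8b-instruct":
                # predibase's endpoint is known to be unstable; skip.
                pass
            else:
                pytest.fail(f"Service Unavailable Error got {str(e)}")
        except litellm.InternalServerError as e:
            if "predibase" in str(e).lower():
                # Only skip internal server errors coming from predibase.
                pass
            else:
                raise

The design choice is deliberately narrow: the ServiceUnavailableError carve-out matches one exact model string rather than all predibase models, so new predibase endpoints still surface 503s until they are explicitly allow-listed.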