Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
fix(utils.py): improved predibase exception mapping

adds unit testing + better coverage for predibase errors

parent 93a3a0cc1e
commit 1dafb1b3b7

11 changed files with 220 additions and 46 deletions
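The utils.py change itself is not reproduced in this excerpt; only the test-file hunks appear below. As a rough orientation, what the new test asserts is that Predibase HTTP status codes surface as the matching litellm exception classes. The helper below is a minimal sketch of that idea, not litellm's actual utils.py code (classify_predibase_error and its fallback are assumptions), using the same code-to-class table the test checks:

import litellm

# Status codes covered by the new test, mapped to the exception classes it expects.
PREDIBASE_ERROR_CLASSES = {
    400: litellm.BadRequestError,
    401: litellm.AuthenticationError,
    404: litellm.NotFoundError,
    408: litellm.Timeout,
    429: litellm.RateLimitError,
    500: litellm.InternalServerError,
    503: litellm.ServiceUnavailableError,
}

def classify_predibase_error(status_code: int):
    # Anything not in the table falls back to litellm's generic APIError (assumed fallback).
    return PREDIBASE_ERROR_CLASSES.get(status_code, litellm.APIError)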
@@ -3,6 +3,7 @@ import os
import sys
import traceback
import subprocess, asyncio
from typing import Any

sys.path.insert(
    0, os.path.abspath("../..")
@@ -19,6 +20,7 @@ from litellm import (
)
from concurrent.futures import ThreadPoolExecutor
import pytest
from unittest.mock import patch, MagicMock

litellm.vertex_project = "pathrise-convert-1606954137718"
litellm.vertex_location = "us-central1"
@@ -655,3 +657,47 @@ def test_litellm_predibase_exception():

# accuracy_score = counts[True]/(counts[True] + counts[False])
# print(f"accuracy_score: {accuracy_score}")


@pytest.mark.parametrize("provider", ["predibase"])
def test_exception_mapping(provider):
    """
    For predibase, run through a set of mock exceptions

    assert that they are being mapped correctly
    """
    litellm.set_verbose = True
    error_map = {
        400: litellm.BadRequestError,
        401: litellm.AuthenticationError,
        404: litellm.NotFoundError,
        408: litellm.Timeout,
        429: litellm.RateLimitError,
        500: litellm.InternalServerError,
        503: litellm.ServiceUnavailableError,
    }

    for code, expected_exception in error_map.items():
        mock_response = Exception()
        setattr(mock_response, "text", "This is an error message")
        setattr(mock_response, "llm_provider", provider)
        setattr(mock_response, "status_code", code)

        response: Any = None
        try:
            response = completion(
                model="{}/test-model".format(provider),
                messages=[{"role": "user", "content": "Hey, how's it going?"}],
                mock_response=mock_response,
            )
        except expected_exception:
            continue
        except Exception as e:
            response = "{}\n{}".format(str(e), traceback.format_exc())
            pytest.fail(
                "Did not raise expected exception. Expected={}, Return={},".format(
                    expected_exception, response
                )
            )

    pass
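To run just the new parametrized test locally, something like the following should work from the repo root; the test file path is an assumption based on litellm's usual test layout and may differ in the actual repo:

import pytest

# Select only the new test by name; adjust the path if the test file lives elsewhere.
pytest.main(["-v", "-k", "test_exception_mapping", "litellm/tests/test_exceptions.py"])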