diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index bbb8e3fd3..71efd9879 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/tests/test_exceptions.py b/litellm/tests/test_exceptions.py
index f8cb8a1b8..95e7f7774 100644
--- a/litellm/tests/test_exceptions.py
+++ b/litellm/tests/test_exceptions.py
@@ -2,6 +2,7 @@ from openai.error import AuthenticationError, InvalidRequestError, RateLimitErro
 import os
 import sys
 import traceback
+import subprocess
 
 sys.path.insert(
     0, os.path.abspath("../..")
@@ -36,6 +37,7 @@ litellm.vertex_location = "us-central1"
 models = ["gpt-3.5-turbo"]
 test_model = "claude-instant-1"
 
+# Test 1: Context Window Errors
 @pytest.mark.parametrize("model", models)
 def test_context_window(model):
     sample_text = "how does a court case get to the Supreme Court?" * 1000
@@ -43,7 +45,19 @@ def test_context_window(model):
     with pytest.raises(ContextWindowExceededError):
         completion(model=model, messages=messages)
 
-test_context_window(models[0])
+
+def test_uninstall_cohere_and_completion_call():
+    # Uninstall cohere package
+    subprocess.call(["pip", "uninstall", "cohere"])
+
+    model = "command-nightly"
+    sample_text = "how does a court case get to the Supreme Court?" * 1000
+    messages = [{"content": sample_text, "role": "user"}]
+
+    # with pytest.raises(Exception):
+    completion(model=model, messages=messages)
+
+test_uninstall_cohere_and_completion_call()
 
 # Test 2: InvalidAuth Errors
 @pytest.mark.parametrize("model", models)
diff --git a/litellm/utils.py b/litellm/utils.py
index 90f0b7c4c..416cfc205 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -1762,7 +1762,7 @@ def exception_type(model, original_exception, custom_llm_provider):
                     llm_provider="cohere",
                     model=model
                 )
-            raise ValueError(original_exception)
+            raise original_exception
 
         elif custom_llm_provider == "huggingface":
             if "length limit exceeded" in error_str:
                 exception_mapping_worked = True
@@ -1915,7 +1915,7 @@ def exception_type(model, original_exception, custom_llm_provider):
                     model=model
                 )
             else:
-                raise ValueError(original_exception)
+                raise original_exception
     except Exception as e:
         # LOGGING
         exception_logging(
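
Note on the litellm/utils.py hunks above: swapping raise ValueError(original_exception) for raise original_exception means unmapped provider errors are re-raised with their original class instead of being wrapped. The snippet below is a minimal, hypothetical sketch of that difference; ProviderSDKError and the two map_exception_* helpers are stand-ins for illustration, not litellm code.

# Sketch (not part of the patch): re-raising preserves the exception type for callers.
class ProviderSDKError(Exception):
    """Stand-in for a provider SDK's exception class."""


def map_exception_old(original_exception):
    # Old behavior: callers can only catch ValueError; the real type is hidden.
    raise ValueError(original_exception)


def map_exception_new(original_exception):
    # New behavior: callers still see, and can catch, the provider's own class.
    raise original_exception


err = ProviderSDKError("connection failed")

try:
    map_exception_new(err)
except ProviderSDKError as e:
    print("new:", type(e).__name__)  # new: ProviderSDKError

try:
    map_exception_old(err)
except ValueError as e:
    print("old:", type(e).__name__)  # old: ValueError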