fix(utils.py): replacing openai.error import statements

This commit is contained in:
Krrish Dholakia 2023-11-11 19:25:21 -08:00
parent c5c3096a47
commit 62013520aa
3 changed files with 5 additions and 6 deletions

View file

@@ -7,7 +7,7 @@ import pytest
sys.path.insert(
0, os.path.abspath("../..")
) # Adds the parent directory to the system path
from openai.error import Timeout
from openai import Timeout
import litellm
from litellm import batch_completion, batch_completion_models, completion, batch_completion_models_all_responses
# litellm.set_verbose=True

View file

@@ -106,7 +106,7 @@ def test_completion_gpt4_turbo():
max_tokens=10,
)
print(response)
except openai.error.RateLimitError:
except openai.RateLimitError:
print("got a rate liimt error")
pass
except Exception as e:
@@ -137,7 +137,7 @@ def test_completion_gpt4_vision():
],
)
print(response)
except openai.error.RateLimitError:
except openai.RateLimitError:
print("got a rate liimt error")
pass
except Exception as e:

View file

@@ -1136,9 +1136,8 @@ def client(original_function):
context_window_fallback_dict = kwargs.get("context_window_fallback_dict", {})
if num_retries:
if (isinstance(e, openai.error.APIError)
or isinstance(e, openai.error.Timeout)
or isinstance(e, openai.error.ServiceUnavailableError)):
if (isinstance(e, openai.APIError)
or isinstance(e, openai.Timeout)):
kwargs["num_retries"] = num_retries
return litellm.completion_with_retries(*args, **kwargs)
elif isinstance(e, litellm.exceptions.ContextWindowExceededError) and context_window_fallback_dict and model in context_window_fallback_dict: