Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00
fix(utils.py): replacing openai.error import statements
This commit is contained in:
parent c5c3096a47
commit 62013520aa
3 changed files with 5 additions and 6 deletions
@@ -7,7 +7,7 @@ import pytest
 sys.path.insert(
     0, os.path.abspath("../..")
 )  # Adds the parent directory to the system path
-from openai.error import Timeout
+from openai import Timeout
 import litellm
 from litellm import batch_completion, batch_completion_models, completion, batch_completion_models_all_responses
 # litellm.set_verbose=True
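The hunk above swaps the pre-1.0 openai.error import for a top-level import. For code that has to run against either SDK layout, a fallback import is one option; the sketch below is not part of this commit and only mirrors the two import paths shown in the diff.

# Sketch only (not in this commit): tolerate either openai SDK layout by trying
# the top-level import the new code uses, then falling back to the old path.
try:
    from openai import Timeout          # layout assumed by the updated line
except ImportError:
    from openai.error import Timeout    # pre-1.0 layout removed by this commit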
@@ -106,7 +106,7 @@ def test_completion_gpt4_turbo():
             max_tokens=10,
         )
         print(response)
-    except openai.error.RateLimitError:
+    except openai.RateLimitError:
         print("got a rate liimt error")
         pass
     except Exception as e:
@@ -137,7 +137,7 @@ def test_completion_gpt4_vision():
             ],
         )
         print(response)
-    except openai.error.RateLimitError:
+    except openai.RateLimitError:
         print("got a rate liimt error")
         pass
     except Exception as e:
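Both test hunks make the same change to the except clause. Where a test suite has to run against either SDK layout, the exception class can be resolved once up front; the helper below is a hypothetical sketch, not code from litellm, and only the two exception paths come from the diff.

import openai

# Resolve the rate-limit exception class once so a single except clause covers
# both layouts shown in the hunks above.
try:
    RateLimitError = openai.RateLimitError       # top-level layout used by the new code
except AttributeError:
    from openai.error import RateLimitError      # pre-1.0 layout removed here

def call_tolerating_rate_limits(fn, **kwargs):
    # Hypothetical helper (not in litellm): swallow rate-limit errors the way
    # the tests above do, and let everything else propagate.
    try:
        return fn(**kwargs)
    except RateLimitError:
        print("got a rate limit error")
        return None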
@@ -1136,9 +1136,8 @@ def client(original_function):
             context_window_fallback_dict = kwargs.get("context_window_fallback_dict", {})

             if num_retries:
-                if (isinstance(e, openai.error.APIError)
-                    or isinstance(e, openai.error.Timeout)
-                    or isinstance(e, openai.error.ServiceUnavailableError)):
+                if (isinstance(e, openai.APIError)
+                    or isinstance(e, openai.Timeout)):
                     kwargs["num_retries"] = num_retries
                     return litellm.completion_with_retries(*args, **kwargs)
             elif isinstance(e, litellm.exceptions.ContextWindowExceededError) and context_window_fallback_dict and model in context_window_fallback_dict:
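The utils.py hunk narrows the retry condition to two exception types and leaves the context-window fallback branch unchanged (its body is cut off above). A standalone sketch of that dispatch pattern follows; the handle_completion_error name and the fallback call are assumptions, while the retry branch mirrors the updated lines of the diff.

import litellm
import openai

def handle_completion_error(e, model, num_retries, context_window_fallback_dict, *args, **kwargs):
    # Sketch of the dispatch in the hunk above; the function name and the
    # fallback body are hypothetical, since the diff truncates that branch.
    if num_retries and isinstance(e, (openai.APIError, openai.Timeout)):
        # Transient failure: re-run through litellm's retry wrapper, as the
        # updated "+" lines do.
        kwargs["num_retries"] = num_retries
        return litellm.completion_with_retries(*args, **kwargs)
    if (
        isinstance(e, litellm.exceptions.ContextWindowExceededError)
        and context_window_fallback_dict
        and model in context_window_fallback_dict
    ):
        # Assumed fallback behavior: retry once with the mapped larger-context model.
        kwargs["model"] = context_window_fallback_dict[model]
        return litellm.completion(*args, **kwargs)
    raise e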