Make return-exceptions the default behavior in litellm.batch_completion

ffreemt 2024-05-24 11:09:11 +08:00
parent 2713272bba
commit 86d46308bf
2 changed files with 8 additions and 27 deletions
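For context, a minimal sketch of what the new default behavior looks like from the caller's side (model name and the deliberately invalid api_key are taken from the test below; the success branch assumes a normal OpenAI-style litellm response):

import litellm

# With this change, a failed request comes back as the exception object
# in the results list instead of raising out of batch_completion.
responses = litellm.batch_completion(
    model="gpt-3.5-turbo",
    messages=[
        [{"role": "user", "content": "hi 1"}],
        [{"role": "user", "content": "hi 2"}],
    ],
    api_key="sk_xxx",  # deliberately invalid key
)

for r in responses:
    if isinstance(r, Exception):
        print("request failed:", r)
    else:
        print(r.choices[0].message.content)  # normal completion response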


@@ -2332,11 +2332,6 @@ def batch_completion(
     """
     args = locals()
-    # extra kw for dealing with exceptions
-    return_exceptions = args.get("kwargs").get("return_exceptions", False)
-    if "return_exceptions" in args.get("kwargs"):
-        args.get("kwargs").pop("return_exceptions")
     batch_messages = messages
     completions = []
     model = model
@@ -2391,15 +2386,14 @@ def batch_completion(
     # Retrieve the results from the futures
     # results = [future.result() for future in completions]
-    if return_exceptions:
-        results = []
-        for future in completions:
-            try:
-                results.append(future.result())
-            except Exception as exc:
-                results.append(exc)
-    else:  # original
-        results = [future.result() for future in completions]
+    # return exceptions if any
+    results = []
+    for future in completions:
+        try:
+            results.append(future.result())
+        except Exception as exc:
+            results.append(exc)
     return results
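The loop above is the usual gather-results-or-exceptions pattern over concurrent.futures futures; a self-contained sketch of the same idea (the helper name and the toy workload are illustrative, not litellm internals):

import concurrent.futures

def collect_results(futures):
    """Return each future's result, substituting the raised exception for failures."""
    results = []
    for future in futures:
        try:
            results.append(future.result())
        except Exception as exc:  # mirror batch_completion: return the error, don't raise
            results.append(exc)
    return results

with concurrent.futures.ThreadPoolExecutor() as pool:
    futures = [pool.submit(int, s) for s in ["1", "2", "oops"]]
    print(collect_results(futures))  # [1, 2, ValueError(...)] -- order follows the inputs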


@@ -1,30 +1,17 @@
"""https://github.com/BerriAI/litellm/pull/3397/commits/a7ec1772b1457594d3af48cdcb0a382279b841c7#diff-44852387ceb00aade916d6b314dfd5d180499e54f35209ae9c07179febe08b4b."""
"""Test batch_completion's return_exceptions."""

import pytest

import litellm

msg1 = [{"role": "user", "content": "hi 1"}]
msg2 = [{"role": "user", "content": "hi 2"}]


def test_batch_completion_return_exceptions_default():
    """Test batch_completion's return_exceptions."""
    with pytest.raises(Exception):
        _ = litellm.batch_completion(
            model="gpt-3.5-turbo",
            messages=[msg1, msg2],
            api_key="sk_xxx",  # deliberately set invalid key
            # return_exceptions=False,
        )


def test_batch_completion_return_exceptions_true():
    """Test batch_completion's return_exceptions."""
    res = litellm.batch_completion(
        model="gpt-3.5-turbo",
        messages=[msg1, msg2],
        api_key="sk_xxx",  # deliberately set invalid key
        return_exceptions=True,
    )

    assert isinstance(res[0], litellm.exceptions.AuthenticationError)
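As a follow-up usage note (not part of the commit): because every request in the batch uses the same invalid key, all returned elements should be exceptions, not just the first, so a hypothetical stricter check could look like this:

res = litellm.batch_completion(
    model="gpt-3.5-turbo",
    messages=[msg1, msg2],
    api_key="sk_xxx",  # same deliberately invalid key
)
# hypothetical extra assertion: every element is an exception, not just res[0]
assert all(isinstance(r, Exception) for r in res)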