forked from phoenix/litellm-mirror

Add litellm\tests\test_batch_completion_return_exceptions.py

parent 64d229caaa
commit a7ec1772b1

3 changed files with 33 additions and 1 deletion
.gitignore (vendored): 2 additions

@@ -52,3 +52,5 @@ litellm/proxy/_new_secret_config.yaml
 litellm/proxy/_new_secret_config.yaml
 litellm/proxy/_super_secret_config.yaml
 litellm/proxy/_super_secret_config.yaml
+.python-version
+litellm/llms/tokenizers/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
@@ -2280,6 +2280,7 @@ def batch_completion(
     deployment_id=None,
     request_timeout: Optional[int] = None,
     timeout: Optional[int] = 600,
+    return_exceptions: bool = False,
     # Optional liteLLM function params
     **kwargs,
 ):
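The hunk above adds only the return_exceptions keyword to batch_completion's signature; the body change that consumes the flag is not part of this excerpt. As a rough illustration of the pattern such a flag enables (a hypothetical helper, not litellm's actual implementation), collecting per-item exceptions instead of letting the first failure abort the batch could look like this:

# Hypothetical sketch of the return_exceptions pattern; not litellm's code.
from typing import Any, Callable, List


def batch_call(
    fn: Callable[[Any], Any],
    inputs: List[Any],
    return_exceptions: bool = False,
) -> List[Any]:
    """Apply fn to each input, optionally keeping exceptions in place."""
    results: List[Any] = []
    for item in inputs:
        try:
            results.append(fn(item))
        except Exception as exc:
            if not return_exceptions:
                raise  # default: the first failure aborts the whole batch
            results.append(exc)  # keep the exception at this item's position
    return results

With return_exceptions=False the first failing item re-raises; with True, the exception object is kept at that item's position in the result list, which is what the new test below asserts.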
litellm/tests/test_batch_completion_return_exceptions.py (new file): 29 additions

@@ -0,0 +1,29 @@
+"""Test batch_completion's return_exceptions."""
+import pytest
+import litellm
+
+msg1 = [{"role": "user", "content": "hi 1"}]
+msg2 = [{"role": "user", "content": "hi 2"}]
+
+
+def test_batch_completion_return_exceptions_default():
+    """Test batch_completion's return_exceptions."""
+    with pytest.raises(Exception):
+        _ = litellm.batch_completion(
+            model="gpt-3.5-turbo",
+            messages=[msg1, msg2],
+            api_key="sk_xxx",  # deliberately set invalid key
+            # return_exceptions=False,
+        )
+
+
+def test_batch_completion_return_exceptions_true():
+    """Test batch_completion's return_exceptions."""
+    res = litellm.batch_completion(
+        model="gpt-3.5-turbo",
+        messages=[msg1, msg2],
+        api_key="sk_xxx",  # deliberately set invalid key
+        return_exceptions=True,
+    )
+
+    assert isinstance(res[0], litellm.exceptions.AuthenticationError)
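The second test only checks the first element of the result list. In practice a caller would iterate over the mixed results and separate successes from failures; a hedged usage sketch reusing the model and messages from the test (the success/failure split itself is illustrative, not taken from the diff):

# Usage sketch built on the keyword added in this commit.
import litellm

messages = [
    [{"role": "user", "content": "hi 1"}],
    [{"role": "user", "content": "hi 2"}],
]

# return_exceptions=True keeps failed requests as exception objects in the
# result list instead of raising on the first failure.
results = litellm.batch_completion(
    model="gpt-3.5-turbo",
    messages=messages,
    return_exceptions=True,
)

successes = [r for r in results if not isinstance(r, Exception)]
failures = [r for r in results if isinstance(r, Exception)]
print(f"{len(successes)} succeeded, {len(failures)} failed")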