Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
fix test proxy exception mapping
This commit is contained in:
parent 58828403ea
commit c50a60004f
2 changed files with 18 additions and 4 deletions
@@ -229,8 +229,9 @@ def test_chat_completion_exception_any_model(client):
         )
         assert isinstance(openai_exception, openai.BadRequestError)
         _error_message = openai_exception.message
-        assert "chat_completion: Invalid model name passed in model=Lite-GPT-12" in str(
-            _error_message
+        assert (
+            "/chat/completions: Invalid model name passed in model=Lite-GPT-12"
+            in str(_error_message)
         )

     except Exception as e:
@@ -259,7 +260,7 @@ def test_embedding_exception_any_model(client):
         print("Exception raised=", openai_exception)
         assert isinstance(openai_exception, openai.BadRequestError)
         _error_message = openai_exception.message
-        assert "embeddings: Invalid model name passed in model=Lite-GPT-12" in str(
+        assert "/embeddings: Invalid model name passed in model=Lite-GPT-12" in str(
             _error_message
         )

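Pulling the fragments above together, the updated chat-completions test is expected to look roughly like the sketch below. The `client` fixture (a test client wrapping the litellm proxy app), the request body, and the use of the openai client's private `_make_status_error_from_response` helper to turn the HTTP response back into a typed exception are assumptions inferred from the diff, not taken verbatim from the repository.

# Hedged sketch of the updated assertion pattern; `client` is assumed to be a
# FastAPI TestClient over the litellm proxy app, and the error body is assumed
# to match the messages shown in the diff above.
import openai


def test_invalid_model_maps_to_bad_request(client):
    test_data = {
        "model": "Lite-GPT-12",  # a model that is not configured on the proxy
        "messages": [{"role": "user", "content": "hi"}],
    }
    response = client.post("/chat/completions", json=test_data)

    # Translate the proxy's OpenAI-style error body back into a typed
    # exception using the openai client, then assert on its message.
    openai_client = openai.OpenAI(api_key="anything")
    openai_exception = openai_client._make_status_error_from_response(
        response=response
    )
    assert isinstance(openai_exception, openai.BadRequestError)

    # After this commit the message is keyed by the route path
    # ("/chat/completions") rather than the handler name ("chat_completion").
    assert (
        "/chat/completions: Invalid model name passed in model=Lite-GPT-12"
        in str(openai_exception.message)
    )

The embeddings test follows the same pattern against the "/embeddings" route, which is why its expected message gains the leading slash in the second hunk.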