mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
fix(proxy_server): returns better error messages for invalid api errors
parent 262f874621
commit 42e0d7cf68
3 changed files with 67 additions and 72 deletions
@@ -54,17 +54,19 @@ def test_completion_invalid_param_cohere():
         else:
             pytest.fail(f'An error occurred {e}')
 
-test_completion_invalid_param_cohere()
+# test_completion_invalid_param_cohere()s
 
 def test_completion_function_call_cohere():
     try:
-        response = completion(model="command-nightly", messages=messages, function_call="TEST-FUNCTION")
+        response = completion(model="command-nightly", messages=messages, functions=["TEST-FUNCTION"])
     except Exception as e:
         if "Function calling is not supported by this provider" in str(e):
             pass
         else:
             pytest.fail(f'An error occurred {e}')
 
 test_completion_function_call_cohere()
 
 def test_completion_function_call_openai():
     try:
         messages = [{"role": "user", "content": "What is the weather like in Boston?"}]
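
For reference, the changed Cohere test can be read as a self-contained sketch. This assumes litellm exposes completion() at the top level and that messages is a simple chat list defined earlier in the test file; the messages content below is illustrative, not taken from the diff.

import pytest
from litellm import completion

# Illustrative placeholder; the real test file defines messages earlier.
messages = [{"role": "user", "content": "Hey, how's it going?"}]

def test_completion_function_call_cohere():
    try:
        # command-nightly (Cohere) does not support function calling; if the
        # call raises, the test accepts only the descriptive error message
        # that this commit is meant to surface.
        response = completion(model="command-nightly", messages=messages, functions=["TEST-FUNCTION"])
    except Exception as e:
        if "Function calling is not supported by this provider" in str(e):
            pass
        else:
            pytest.fail(f'An error occurred {e}')

test_completion_function_call_cohere()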