Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
fix(proxy_server.py): accept max tokens as int
parent 6fd50177e8
commit 923f90aed2

2 changed files with 3 additions and 3 deletions
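The commit subject suggests the proxy previously rejected or mishandled max_tokens when the value did not arrive as an int. A minimal sketch of that kind of fix, assuming a request-body dict named data (the helper name normalize_max_tokens and the dict are illustrative, not taken from proxy_server.py):

# Hypothetical sketch of "accept max tokens as int": coerce a max_tokens
# value that may arrive as a string into an int before it is used.
# Not the actual proxy_server.py change.
def normalize_max_tokens(data: dict) -> dict:
    max_tokens = data.get("max_tokens")
    if max_tokens is not None and not isinstance(max_tokens, int):
        # e.g. "256" -> 256; int() raises ValueError for non-numeric input
        data["max_tokens"] = int(max_tokens)
    return data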
@@ -45,7 +45,7 @@ def test_chat_completion():
         pytest.fail("LiteLLM Proxy test failed. Exception", e)
 
 # Run the test
-# test_chat_completion()
+test_chat_completion()
 
 
 def test_chat_completion_azure():
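This hunk uncomments the direct test_chat_completion() call, so the test executes when the file is run as a plain Python script. It can also be collected by pytest; a typical invocation (the file path is assumed, not shown in this diff) would be:

pytest litellm/tests/test_proxy.py -k test_chat_completion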