fix(proxy_server.py): accept `max_tokens` as int

This commit is contained in:
Krrish Dholakia 2023-12-01 15:19:34 -08:00
parent 6fd50177e8
commit 923f90aed2
2 changed files with 3 additions and 3 deletions

View file

@ -45,7 +45,7 @@ def test_chat_completion():
pytest.fail("LiteLLM Proxy test failed. Exception", e)
# Run the test
# test_chat_completion()
test_chat_completion()
def test_chat_completion_azure():