test(test_proxy_server.py): add testing for /model/new endpoint

This commit is contained in:
Krrish Dholakia 2023-12-09 22:43:27 -08:00
parent a5fddf55dd
commit 0a5207a558

View file

@@ -122,7 +122,7 @@ def test_embedding(client):
 # Run the test
 # test_embedding()
-@pytest.mark.skip(reason="hitting yaml load issues on circle-ci")
+# @pytest.mark.skip(reason="hitting yaml load issues on circle-ci")
 def test_add_new_model(client):
     global headers
     try:
@@ -142,8 +142,8 @@ def test_add_new_model(client):
         print(f"response: {result}")
         model_info = None
         for m in result["data"]:
-            if m["id"]["model_name"] == "test_openai_models":
-                model_info = m["id"]["model_info"]
+            if m["model_name"] == "test_openai_models":
+                model_info = m["model_info"]
         assert model_info["description"] == "this is a test openai model"
     except Exception as e:
         pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")