fix(proxy_server.py): add testing for model info being added to /model/new

This commit is contained in:
Krrish Dholakia 2023-12-02 14:15:38 -08:00
parent 278ef6bd83
commit 5a4c054eef
3 changed files with 37 additions and 10 deletions

View file

@ -93,3 +93,30 @@ def test_embedding():
# Run the test
# test_embedding()
def test_add_new_model():
    """Verify that POST /model/new persists `model_info` and that the info
    is returned for the new model by GET /model/info.

    Any exception (failed assertion, bad response shape, connection error)
    is converted into an explicit pytest failure by the except clause.
    """
    try:
        test_data = {
            "model_name": "test_openai_models",
            "litellm_params": {
                "model": "gpt-3.5-turbo",
            },
            "model_info": {
                "description": "this is a test openai model"
            },
        }
        # Fail fast if model registration itself is rejected, rather than
        # only discovering the problem via the later /model/info lookup.
        add_response = client.post("/model/new", json=test_data)
        assert add_response.status_code == 200

        response = client.get("/model/info")
        assert response.status_code == 200
        result = response.json()
        print(f"response: {result}")

        # Find the entry we just registered among all returned models.
        model_info = None
        for m in result["data"]:
            if m["id"]["model_name"] == "test_openai_models":
                model_info = m["id"]["model_info"]
        # Explicit not-None check gives a clear failure message instead of
        # an opaque TypeError from subscripting None when the model is missing.
        assert model_info is not None, "test_openai_models not found in /model/info"
        assert model_info["description"] == "this is a test openai model"
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
test_add_new_model()