test(test_models.py): fix delete model test

This commit is contained in:
Krrish Dholakia 2024-04-04 08:45:32 -07:00
parent 674499b41a
commit 4b56f08cbe
3 changed files with 31 additions and 26 deletions

View file

@@ -215,7 +215,7 @@ class ProxyChatCompletionRequest(LiteLLMBase):
class ModelInfoDelete(LiteLLMBase): class ModelInfoDelete(LiteLLMBase):
id: Optional[str] id: str
class ModelInfo(LiteLLMBase): class ModelInfo(LiteLLMBase):

View file

@@ -6833,6 +6833,7 @@ async def add_new_model(
description="v2 - returns all the models set on the config.yaml, shows 'user_access' = True if the user has access to the model. Provides more info about each model in /models, including config.yaml descriptions (except api key and api base)", description="v2 - returns all the models set on the config.yaml, shows 'user_access' = True if the user has access to the model. Provides more info about each model in /models, including config.yaml descriptions (except api key and api base)",
tags=["model management"], tags=["model management"],
dependencies=[Depends(user_api_key_auth)], dependencies=[Depends(user_api_key_auth)],
include_in_schema=False,
) )
async def model_info_v2( async def model_info_v2(
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth), user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
@@ -7036,37 +7037,40 @@ async def model_info_v1(
async def delete_model(model_info: ModelInfoDelete): async def delete_model(model_info: ModelInfoDelete):
global llm_router, llm_model_list, general_settings, user_config_file_path, proxy_config global llm_router, llm_model_list, general_settings, user_config_file_path, proxy_config
try: try:
if not os.path.exists(user_config_file_path): """
raise HTTPException(status_code=404, detail="Config file does not exist.") [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
# Load existing config - Check if id in db
config = await proxy_config.get_config() - Delete
"""
# If model_list is not in the config, nothing can be deleted global prisma_client
if len(config.get("model_list", [])) == 0:
if prisma_client is None:
raise HTTPException( raise HTTPException(
status_code=400, detail="No model list available in the config." status_code=500,
detail={
"error": "No DB Connected. Here's how to do it - https://docs.litellm.ai/docs/proxy/virtual_keys"
},
) )
# Check if the model with the specified model_id exists # update DB
model_to_delete = None if general_settings.get("store_model_in_db", False) == True:
"""
for model in config["model_list"]: - store model_list in db
if model.get("model_info", {}).get("id", None) == model_info.id: - store keys separately
model_to_delete = model """
break # encrypt litellm params #
await prisma_client.db.litellm_proxymodeltable.delete(
# If the model was not found, return an error where={"model_id": model_info.id}
if model_to_delete is None: )
raise HTTPException( else:
status_code=400, detail="Model with given model_id not found." raise HTTPException(
status_code=500,
detail={
"error": "Set `store_model_in_db: true` in general_settings on your config.yaml"
},
) )
# Remove model from the list and save the updated config
config["model_list"].remove(model_to_delete)
# Save updated config
config = await proxy_config.save_config(new_config=config)
return {"message": "Model deleted successfully"} return {"message": "Model deleted successfully"}
except Exception as e: except Exception as e:

View file

@@ -186,5 +186,6 @@ async def test_delete_models():
key_gen = await generate_key(session=session) key_gen = await generate_key(session=session)
key = key_gen["key"] key = key_gen["key"]
await add_models(session=session, model_id=model_id) await add_models(session=session, model_id=model_id)
await asyncio.sleep(60)
await chat_completion(session=session, key=key) await chat_completion(session=session, key=key)
await delete_model(session=session, model_id=model_id) await delete_model(session=session, model_id=model_id)