forked from phoenix/litellm-mirror

feat(proxy_server.py): adding /model/delete endpoint

This commit is contained in:
parent ff028111cf
commit 92b2cbcdc5

2 changed files with 57 additions and 4 deletions
proxy_server.py

@@ -999,7 +999,7 @@ async def info_key_fn(key: str = fastapi.Query(..., description="Key in the requ
     )

 #### MODEL MANAGEMENT ####

 #### [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
 @router.post("/model/new", description="Allows adding new models to the model list in the config.yaml", tags=["model management"], dependencies=[Depends(user_api_key_auth)])
 async def add_new_model(model_params: ModelParams):
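For context, a minimal sketch of what a request to the existing /model/new endpoint might look like, using the ModelParams schema updated later in this commit. The base URL, API key, alias, and id below are placeholders, not values from the commit:

import requests  # any HTTP client works; requests is assumed for brevity

# Hypothetical placeholders: substitute your proxy URL and master key.
resp = requests.post(
    "http://0.0.0.0:8000/model/new",
    headers={"Authorization": "Bearer sk-1234"},
    json={
        "model_name": "my-gpt-alias",                  # public alias served by the proxy
        "litellm_params": {"model": "gpt-3.5-turbo"},  # forwarded to litellm.completion
        "model_info": {"id": "my-model-id"},           # id later targeted by /model/delete
    },
)
print(resp.status_code, resp.json())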
@@ -1074,6 +1074,48 @@ async def model_info(request: Request):
         object="list",
     )

+#### [BETA] - This is a beta endpoint, format might change based on user feedback. - https://github.com/BerriAI/litellm/issues/964
+@router.post("/model/delete", description="Allows deleting models in the model list in the config.yaml", tags=["model management"], dependencies=[Depends(user_api_key_auth)])
+async def delete_model(model_info: ModelInfoDelete):
+    global llm_router, llm_model_list, general_settings, user_config_file_path
+    try:
+        if not os.path.exists(user_config_file_path):
+            raise HTTPException(status_code=404, detail="Config file does not exist.")
+
+        with open(user_config_file_path, "r") as config_file:
+            config = yaml.safe_load(config_file)
+
+        # If model_list is not in the config, nothing can be deleted
+        if 'model_list' not in config:
+            raise HTTPException(status_code=404, detail="No model list available in the config.")
+
+        # Check if the model with the specified model_id exists
+        model_to_delete = None
+        for model in config['model_list']:
+            if model.get('model_info', {}).get('id', None) == model_info.id:
+                model_to_delete = model
+                break
+
+        # If the model was not found, return an error
+        if model_to_delete is None:
+            raise HTTPException(status_code=404, detail="Model with given model_id not found.")
+
+        # Remove model from the list and save the updated config
+        config['model_list'].remove(model_to_delete)
+        with open(user_config_file_path, "w") as config_file:
+            yaml.dump(config, config_file, default_flow_style=False)
+
+        # Update Router
+        llm_router, llm_model_list, general_settings = load_router_config(router=llm_router, config_file_path=user_config_file_path)
+
+        return {"message": "Model deleted successfully"}
+
+    except HTTPException as e:
+        # Re-raise the HTTP exceptions to be handled by FastAPI
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
+
 #### EXPERIMENTAL QUEUING ####
 @router.post("/queue/request", dependencies=[Depends(user_api_key_auth)])
 async def async_queue_request(request: Request):
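A matching sketch for the new endpoint: deleting the model added above, keyed by the id stored under model_info.id in config.yaml. Again, the URL, key, and id are placeholders:

import requests

# Hypothetical placeholders, as above.
resp = requests.post(
    "http://0.0.0.0:8000/model/delete",
    headers={"Authorization": "Bearer sk-1234"},
    json={"id": "my-model-id"},  # must match model_info.id of an entry in model_list
)
print(resp.status_code, resp.json())  # 200 -> {"message": "Model deleted successfully"}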
Second changed file (pydantic type definitions):

@@ -1,7 +1,7 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, Extra
 from typing import Optional, List, Union, Dict
 from datetime import datetime
+import uuid
 ######### Request Class Definition ######
 class ProxyChatCompletionRequest(BaseModel):
     model: str
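The new Extra import is used just below in ModelInfo's Config. As a quick illustration of what Extra.allow buys (pydantic v1 API; the Demo model here is hypothetical):

from pydantic import BaseModel, Extra

class Demo(BaseModel):
    id: str

    class Config:
        extra = Extra.allow  # accept and keep fields not declared on the model

d = Demo(id="abc", extra_field=123)  # would raise with Extra.forbid
print(d.extra_field)                 # 123 -- undeclared field is preserved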
@@ -38,10 +38,21 @@ class ProxyChatCompletionRequest(BaseModel):
     class Config:
         extra='allow' # allow params not defined here, these fall in litellm.completion(**kwargs)

+class ModelInfo(BaseModel):
+    id: Optional[str]
+
+    class Config:
+        extra = Extra.allow  # Allow extra fields
+        protected_namespaces = ()
+
+class ModelInfoDelete(BaseModel):
+    id: Optional[str]
+
 class ModelParams(BaseModel):
     model_name: str
     litellm_params: dict
-    model_info: Optional[dict]
+    model_info: Optional[ModelInfo]=None

     class Config:
         protected_namespaces = ()
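To tie the two files together, a self-contained sketch of the config.yaml structure that delete_model walks when matching ModelInfoDelete.id; all names and ids here are hypothetical:

import yaml

# Shape of the parsed config that delete_model iterates over.
sample_config = yaml.safe_load("""
model_list:
  - model_name: my-gpt-alias
    litellm_params:
      model: gpt-3.5-turbo
    model_info:
      id: my-model-id
""")

target_id = "my-model-id"  # what ModelInfoDelete.id would carry
match = next(
    (m for m in sample_config["model_list"]
     if m.get("model_info", {}).get("id", None) == target_id),
    None,
)
assert match is not None and match["model_name"] == "my-gpt-alias"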