Allow editing model api key + provider on UI (#8406)

* fix(parallel_request_limiter.py): add back parallel request information to max parallel request limiter

Resolves https://github.com/BerriAI/litellm/issues/8392

* test: mark flaky test to handle time based tracking issues

* feat(model_management_endpoints.py): expose new patch `/model/{model_id}/update` endpoint

Allows updating specific values of a model in the db — named to make it clear to admins that it performs a PATCH

* feat(edit_model_modal.tsx): allow user to update llm provider + api key on the ui

* fix: fix linting error
This commit is contained in:
Krish Dholakia 2025-02-08 23:50:47 -08:00 committed by GitHub
parent 0d2e723e95
commit e4411e4815
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 285 additions and 11 deletions

View file

@@ -196,6 +196,9 @@ from litellm.proxy.management_endpoints.key_management_endpoints import (
from litellm.proxy.management_endpoints.key_management_endpoints import (
router as key_management_router,
)
from litellm.proxy.management_endpoints.model_management_endpoints import (
router as model_management_router,
)
from litellm.proxy.management_endpoints.organization_endpoints import (
router as organization_router,
)
@@ -6042,6 +6045,11 @@ async def update_model(
model_params: updateDeployment,
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
Old endpoint for model update. Makes a PUT request.
Use `/model/{model_id}/update` to PATCH the stored model in db.
"""
global llm_router, llm_model_list, general_settings, user_config_file_path, proxy_config, prisma_client, master_key, store_model_in_db, proxy_logging_obj
try:
import base64
@@ -8924,3 +8932,4 @@ app.include_router(ui_crud_endpoints_router)
# Register feature routers on the proxy's FastAPI app so their endpoints
# are served. model_management_router is newly added here and exposes the
# PATCH `/model/{model_id}/update` endpoint for partial model updates in db.
app.include_router(openai_files_router)
app.include_router(team_callback_router)
app.include_router(budget_management_router)
app.include_router(model_management_router)