forked from phoenix/litellm-mirror
Merge pull request #2965 from BerriAI/litellm_fix_key_update
fix - delete key from in-memory cache after /key/update
This commit is contained in:
commit
8ba140b09e
1 changed files with 7 additions and 0 deletions
|
@@ -4449,6 +4449,13 @@ async def update_key_fn(request: Request, data: UpdateKeyRequest):
|
||||||
response = await prisma_client.update_data(
|
response = await prisma_client.update_data(
|
||||||
token=key, data={**non_default_values, "token": key}
|
token=key, data={**non_default_values, "token": key}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Delete - key from cache, since it's been updated!
|
||||||
|
# key updated - a new model could have been added to this key. it should not block requests after this is done
|
||||||
|
user_api_key_cache.delete_cache(key)
|
||||||
|
hashed_token = hash_token(key)
|
||||||
|
user_api_key_cache.delete_cache(hashed_token)
|
||||||
|
|
||||||
return {"key": key, **response["data"]}
|
return {"key": key, **response["data"]}
|
||||||
# update based on remaining passed in values
|
# update based on remaining passed in values
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue