Merge pull request #2965 from BerriAI/litellm_fix_key_update

fix - delete key from inMemory Cache after /key/update
Ishaan Jaff 2024-04-11 18:18:05 -07:00 committed by GitHub
commit 8ba140b09e


@@ -4449,6 +4449,13 @@ async def update_key_fn(request: Request, data: UpdateKeyRequest):
        # update based on remaining passed-in values
        response = await prisma_client.update_data(
            token=key, data={**non_default_values, "token": key}
        )
        # Delete the key from the in-memory cache, since it has just been updated.
        # A new model may have been added to this key, so stale cache entries must
        # not block requests made after the update completes.
        user_api_key_cache.delete_cache(key)
        hashed_token = hash_token(key)
        user_api_key_cache.delete_cache(hashed_token)
        return {"key": key, **response["data"]}
    except Exception as e:
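
For context, the cache-invalidation pattern added in this diff can be sketched in isolation as below. This is a minimal, self-contained sketch, not LiteLLM's actual implementation: InMemoryCache and invalidate_key are hypothetical stand-ins for the proxy's user_api_key_cache and the inline delete_cache calls, and hash_token is assumed here to be a sha256 hex digest of the raw API key.

import hashlib


class InMemoryCache:
    """Tiny stand-in for the proxy's user_api_key_cache (illustrative only)."""

    def __init__(self) -> None:
        self._store: dict = {}

    def set_cache(self, key: str, value) -> None:
        self._store[key] = value

    def get_cache(self, key: str):
        return self._store.get(key)

    def delete_cache(self, key: str) -> None:
        # Missing keys are ignored, so invalidation is always safe to call.
        self._store.pop(key, None)


def hash_token(token: str) -> str:
    """Hash an API key before it is used as a cache lookup key (assumed sha256)."""
    return hashlib.sha256(token.encode()).hexdigest()


def invalidate_key(cache: InMemoryCache, key: str) -> None:
    """Drop both the raw key and its hashed form so no stale metadata is served."""
    cache.delete_cache(key)
    cache.delete_cache(hash_token(key))


# Usage: after persisting a key update, clear both cache entries so the next
# request re-reads the updated key (e.g. with newly added models) from the DB.
cache = InMemoryCache()
cache.set_cache("sk-example", {"models": ["gpt-3.5-turbo"]})
cache.set_cache(hash_token("sk-example"), {"models": ["gpt-3.5-turbo"]})

invalidate_key(cache, "sk-example")
assert cache.get_cache("sk-example") is None
assert cache.get_cache(hash_token("sk-example")) is None

Deleting under both the raw and hashed forms matters because the proxy may cache key metadata under either representation; removing only one would let requests keyed on the other continue to see the pre-update state.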