Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00
(fix) caching config

parent 4db51c6eae
commit 97a8177dfc

1 changed file with 1 addition and 2 deletions
@@ -167,8 +167,7 @@ async def user_api_key_auth(request: Request):
     if api_key == master_key:
         return
     if api_key in config_cache:
-        model_list = config.get("model_list", [])
-        llm_model_list = model_list
+        llm_model_list = config_cache[api_key].get("model_list", [])
         return

     if prisma_client:
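The change swaps the read from the shared global config for a lookup in the per-key cache, so each API key resolves its own model list. Below is a minimal sketch of that lookup, assuming config_cache maps an API key to its cached config dict; the key and model entries are hypothetical example data, not taken from the repository.

# Minimal sketch: config_cache maps api_key -> cached config dict (assumption);
# the key and model entries below are hypothetical example data.
config_cache = {
    "sk-example-key": {"model_list": [{"model_name": "gpt-3.5-turbo"}]},
}

def resolve_model_list(api_key: str) -> list:
    # After the fix, the model list is read from the per-key cached config,
    # not from a shared global `config` object.
    if api_key in config_cache:
        return config_cache[api_key].get("model_list", [])
    return []

if __name__ == "__main__":
    print(resolve_model_list("sk-example-key"))  # [{'model_name': 'gpt-3.5-turbo'}]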