fix: strip llm credential before returning

Author: John HU (committed by GitHub)
Date:   2024-06-17 12:14:40 -07:00
Parent: 092040afec
Commit: 3d16bd6eb8
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)


@@ -6057,8 +6057,11 @@ async def model_info_v2(
             if k not in model_info:
                 model_info[k] = v
         _model["model_info"] = model_info
-        # don't return the api key / vertex credentials
+        # don't return the llm credentials
         _model["litellm_params"].pop("api_key", None)
+        _model["litellm_params"].pop("vertex_credentials", None)
+        _model["litellm_params"].pop("aws_access_key_id", None)
+        _model["litellm_params"].pop("aws_secret_access_key", None)
 
     verbose_proxy_logger.debug("all_models: %s", all_models)
     return {"data": all_models}
@@ -6570,8 +6573,11 @@ async def model_info_v1(
             if k not in model_info:
                 model_info[k] = v
         model["model_info"] = model_info
-        # don't return the api key
+        # don't return the llm credentials
         model["litellm_params"].pop("api_key", None)
+        model["litellm_params"].pop("vertex_credentials", None)
+        model["litellm_params"].pop("aws_access_key_id", None)
+        model["litellm_params"].pop("aws_secret_access_key", None)
 
     verbose_proxy_logger.debug("all_models: %s", all_models)
     return {"data": all_models}