diff --git a/litellm/__init__.py b/litellm/__init__.py
index d66707f8b3..46f4906627 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -14,6 +14,7 @@ from litellm.types.utils import (
     BudgetConfig,
     all_litellm_params,
     all_litellm_params as _litellm_completion_params,
+    CredentialItem,
 )  # maintain backwards compatibility for root param
 from litellm._logging import (
     set_verbose,
@@ -198,6 +199,8 @@ AZURE_DEFAULT_API_VERSION = "2024-08-01-preview"  # this is updated to the lates
 WATSONX_DEFAULT_API_VERSION = "2024-03-13"
 ### COHERE EMBEDDINGS DEFAULT TYPE ###
 COHERE_DEFAULT_EMBEDDING_INPUT_TYPE: COHERE_EMBEDDING_INPUT_TYPES = "search_document"
+### CREDENTIALS ###
+credential_list: Optional[List[CredentialItem]] = None
 ### GUARDRAILS ###
 llamaguard_model_name: Optional[str] = None
 openai_moderations_model_name: Optional[str] = None
diff --git a/litellm/proxy/_new_secret_config.yaml b/litellm/proxy/_new_secret_config.yaml
index f3d8d55990..84b18925b2 100644
--- a/litellm/proxy/_new_secret_config.yaml
+++ b/litellm/proxy/_new_secret_config.yaml
@@ -6,7 +6,7 @@ model_list:
 
 credential_list:
   - credential_name: default_azure_credential
-    credentials:
+    credential_values:
       api_key: os.environ/AZURE_API_KEY
       api_base: os.environ/AZURE_API_BASE
     credential_info:
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 99b6f4ea54..0e5ad27960 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -287,7 +287,7 @@ from litellm.types.llms.openai import HttpxBinaryResponseContent
 from litellm.types.router import DeploymentTypedDict
 from litellm.types.router import ModelInfo as RouterModelInfo
 from litellm.types.router import RouterGeneralSettings, updateDeployment
-from litellm.types.utils import CustomHuggingfaceTokenizer
+from litellm.types.utils import CredentialItem, CustomHuggingfaceTokenizer
 from litellm.types.utils import ModelInfo as ModelMapInfo
 from litellm.types.utils import RawRequestTypedDict, StandardLoggingPayload
 from litellm.utils import _add_custom_logger_callback_to_specific_event
@@ -2184,6 +2184,16 @@ class ProxyConfig:
             init_guardrails_v2(
                 all_guardrails=guardrails_v2, config_file_path=config_file_path
             )
+
+        ## CREDENTIALS
+        # Validate the raw config dicts into CredentialItem models so the
+        # stored value matches litellm.credential_list's declared type,
+        # Optional[List[CredentialItem]]; assigning raw dicts would break any
+        # consumer that reads .credential_name / .credential_values.
+        if config.get("credential_list"):
+            litellm.credential_list = [
+                CredentialItem(**cred) for cred in config.get("credential_list")
+            ]
         return router, router.get_model_list(), general_settings
 
     def _load_alerting_settings(self, general_settings: dict):
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index 4af88100fa..d1bfaac4ef 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -2011,3 +2011,9 @@ class RawRequestTypedDict(TypedDict, total=False):
     raw_request_body: Optional[dict]
     raw_request_headers: Optional[dict]
     error: Optional[str]
+
+
+class CredentialItem(BaseModel):
+    credential_name: str
+    credential_values: dict
+    credential_info: dict