feat(proxy_server.py): move credential list to being a top-level param

Krrish Dholakia 2025-03-10 17:04:05 -07:00
parent 5458b08425
commit 4bd4bb16fd
4 changed files with 14 additions and 2 deletions

litellm/__init__.py

@@ -14,6 +14,7 @@ from litellm.types.utils import (
     BudgetConfig,
     all_litellm_params,
     all_litellm_params as _litellm_completion_params,
+    CredentialItem,
 )  # maintain backwards compatibility for root param
 from litellm._logging import (
     set_verbose,
@@ -198,6 +199,8 @@ AZURE_DEFAULT_API_VERSION = "2024-08-01-preview"  # this is updated to the lates
 WATSONX_DEFAULT_API_VERSION = "2024-03-13"
 ### COHERE EMBEDDINGS DEFAULT TYPE ###
 COHERE_DEFAULT_EMBEDDING_INPUT_TYPE: COHERE_EMBEDDING_INPUT_TYPES = "search_document"
+### CREDENTIALS ###
+credential_list: Optional[List[CredentialItem]] = None
 ### GUARDRAILS ###
 llamaguard_model_name: Optional[str] = None
 openai_moderations_model_name: Optional[str] = None
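
With credential_list exposed as a module-level param, it can also be set programmatically rather than via the proxy config. A minimal sketch, mirroring what the proxy config loader does in proxy_server.py below (all values here are placeholders):

import litellm
from litellm.types.utils import CredentialItem

# Register a named credential at the module level; fields match the
# CredentialItem model added in litellm/types/utils.py in this commit.
litellm.credential_list = [
    CredentialItem(
        credential_name="default_azure_credential",
        credential_values={
            "api_key": "sk-...",  # placeholder
            "api_base": "https://example.openai.azure.com",  # placeholder
        },
        credential_info={"description": "example Azure credential"},
    )
]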

proxy config (YAML)

@@ -6,7 +6,7 @@ model_list:
 credential_list:
   - credential_name: default_azure_credential
-    credentials:
+    credential_values:
       api_key: os.environ/AZURE_API_KEY
       api_base: os.environ/AZURE_API_BASE
     credential_info:
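
The os.environ/<VAR> values follow litellm's config convention of referencing environment variables instead of inlining secrets. A hedged sketch of what that lookup amounts to (litellm ships its own resolver; resolve_env_ref here is a hypothetical illustration):

import os

def resolve_env_ref(value: str) -> str:
    # Hypothetical helper: "os.environ/AZURE_API_KEY" -> os.environ["AZURE_API_KEY"].
    prefix = "os.environ/"
    if isinstance(value, str) and value.startswith(prefix):
        return os.environ[value[len(prefix):]]
    return value

api_key = resolve_env_ref("os.environ/AZURE_API_KEY")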

litellm/proxy/proxy_server.py

@@ -287,7 +287,7 @@ from litellm.types.llms.openai import HttpxBinaryResponseContent
 from litellm.types.router import DeploymentTypedDict
 from litellm.types.router import ModelInfo as RouterModelInfo
 from litellm.types.router import RouterGeneralSettings, updateDeployment
-from litellm.types.utils import CustomHuggingfaceTokenizer
+from litellm.types.utils import CredentialItem, CustomHuggingfaceTokenizer
 from litellm.types.utils import ModelInfo as ModelMapInfo
 from litellm.types.utils import RawRequestTypedDict, StandardLoggingPayload
 from litellm.utils import _add_custom_logger_callback_to_specific_event
@@ -2184,6 +2184,9 @@ class ProxyConfig:
             init_guardrails_v2(
                 all_guardrails=guardrails_v2, config_file_path=config_file_path
             )
+
+        ## CREDENTIALS
+        litellm.credential_list = config.get("credential_list")
         return router, router.get_model_list(), general_settings

     def _load_alerting_settings(self, general_settings: dict):
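
As written, config.get("credential_list") hands over whatever the YAML parsed to (raw dicts, or None), while the module-level param is typed Optional[List[CredentialItem]]. A hedged sketch of validating such entries into the model added below, using an inline sample of the parsed config:

from litellm.types.utils import CredentialItem

# Sample of what the YAML above parses into.
config = {
    "credential_list": [
        {
            "credential_name": "default_azure_credential",
            "credential_values": {"api_key": "os.environ/AZURE_API_KEY"},
            "credential_info": {},
        }
    ]
}

# Validate each raw entry into the typed model; the commit itself assigns
# the raw value as-is.
typed = [CredentialItem(**entry) for entry in (config.get("credential_list") or [])]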

litellm/types/utils.py

@@ -2011,3 +2011,9 @@ class RawRequestTypedDict(TypedDict, total=False):
     raw_request_body: Optional[dict]
     raw_request_headers: Optional[dict]
     error: Optional[str]
+
+
+class CredentialItem(BaseModel):
+    credential_name: str
+    credential_values: dict
+    credential_info: dict
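
CredentialItem is a plain pydantic model, so entries round-trip cleanly between config dicts and typed objects. A small usage sketch with placeholder values:

from litellm.types.utils import CredentialItem

item = CredentialItem(
    credential_name="default_azure_credential",
    credential_values={"api_key": "sk-..."},  # placeholder
    credential_info={"description": "example"},
)
# model_dump() on pydantic v2; use item.dict() on pydantic v1.
print(item.model_dump())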