forked from phoenix/litellm-mirror
Removed config dict type definition
This commit is contained in:
parent 7c31eccdc2
commit f3d0f003fb
5 changed files with 25 additions and 25 deletions
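Every hunk below makes the same one-line change: the ConfigDict type annotation is dropped from the model_config assignment of each pydantic model, leaving the plain model_config = ConfigDict(...) form. A minimal sketch of the pattern, assuming pydantic v2 (ExampleParams is a hypothetical model for illustration, not one of the litellm classes touched here):

# Minimal sketch of the pattern this commit applies, assuming pydantic v2.
# ExampleParams is a hypothetical model, not a class from this diff.
from pydantic import BaseModel, ConfigDict


class ExampleParams(BaseModel):
    model_name: str

    # Before: model_config: ConfigDict = ConfigDict(protected_namespaces=())
    # After:  plain assignment, no ConfigDict annotation
    model_config = ConfigDict(protected_namespaces=())
    # protected_namespaces=() silences pydantic's warning about field names
    # that start with "model_" (e.g. model_name above).


print(ExampleParams(model_name="gpt-3.5-turbo"))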
@@ -35,7 +35,7 @@ class LiteLLMBase(BaseModel):
             # if using pydantic v1
             return self.__fields_set__
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLM_UpperboundKeyGenerateParams(LiteLLMBase):
@@ -298,7 +298,7 @@ class ProxyChatCompletionRequest(LiteLLMBase):
     deployment_id: Optional[str] = None
     request_timeout: Optional[int] = None
 
-    model_config: ConfigDict = ConfigDict(extra="allow")  # allow params not defined here, these fall in litellm.completion(**kwargs)
+    model_config = ConfigDict(extra="allow")  # allow params not defined here, these fall in litellm.completion(**kwargs)
 
 
 class ModelInfoDelete(LiteLLMBase):
@@ -325,7 +325,7 @@ class ModelInfo(LiteLLMBase):
         ]
     ]
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow")
+    model_config = ConfigDict(protected_namespaces=(), extra="allow")
 
     @model_validator(mode="before")
     @classmethod
@@ -354,7 +354,7 @@ class ModelParams(LiteLLMBase):
     litellm_params: dict
     model_info: ModelInfo
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
     @model_validator(mode="before")
     @classmethod
@@ -393,7 +393,7 @@ class GenerateKeyRequest(GenerateRequestBase):
         {}
     )  # {"gpt-4": 5.0, "gpt-3.5-turbo": 5.0}, defaults to {}
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class GenerateKeyResponse(GenerateKeyRequest):
@@ -444,7 +444,7 @@ class LiteLLM_ModelTable(LiteLLMBase):
    created_by: str
    updated_by: str
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class NewUserRequest(GenerateKeyRequest):
@@ -533,7 +533,7 @@ class TeamBase(LiteLLMBase):
 class NewTeamRequest(TeamBase):
     model_aliases: Optional[dict] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class GlobalEndUsersSpend(LiteLLMBase):
@@ -587,7 +587,7 @@ class LiteLLM_TeamTable(TeamBase):
     budget_reset_at: Optional[datetime] = None
     model_id: Optional[int] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
     @model_validator(mode="before")
     @classmethod
@@ -626,7 +626,7 @@ class LiteLLM_BudgetTable(LiteLLMBase):
     model_max_budget: Optional[dict] = None
     budget_duration: Optional[str] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class NewOrganizationRequest(LiteLLM_BudgetTable):
@@ -676,7 +676,7 @@ class KeyManagementSettings(LiteLLMBase):
 class TeamDefaultSettings(LiteLLMBase):
     team_id: str
 
-    model_config: ConfigDict = ConfigDict(extra="allow")  # allow params not defined here, these fall in litellm.completion(**kwargs)
+    model_config = ConfigDict(extra="allow")  # allow params not defined here, these fall in litellm.completion(**kwargs)
 
 
 class DynamoDBArgs(LiteLLMBase):
@@ -840,7 +840,7 @@ class ConfigYAML(LiteLLMBase):
         description="litellm router object settings. See router.py __init__ for all, example router.num_retries=5, router.timeout=5, router.max_retries=5, router.retry_after=5",
     )
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLM_VerificationToken(LiteLLMBase):
@@ -874,7 +874,7 @@ class LiteLLM_VerificationToken(LiteLLMBase):
     user_id_rate_limits: Optional[dict] = None
     team_id_rate_limits: Optional[dict] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLM_VerificationTokenView(LiteLLM_VerificationToken):
@@ -941,7 +941,7 @@ class LiteLLM_UserTable(LiteLLMBase):
             values.update({"models": []})
         return values
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLM_EndUserTable(LiteLLMBase):
@@ -960,7 +960,7 @@ class LiteLLM_EndUserTable(LiteLLMBase):
             values.update({"spend": 0.0})
         return values
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLM_SpendLogs(LiteLLMBase):

@@ -191,4 +191,4 @@ class CompletionRequest(BaseModel):
     api_key: Optional[str] = None
     model_list: Optional[List[str]] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow")
+    model_config = ConfigDict(protected_namespaces=(), extra="allow")

@@ -18,4 +18,4 @@ class EmbeddingRequest(BaseModel):
     litellm_logging_obj: Optional[dict] = None
     logger_fn: Optional[str] = None
 
-    model_config: ConfigDict = ConfigDict(extra="allow")
+    model_config = ConfigDict(extra="allow")

@@ -12,7 +12,7 @@ class ModelConfig(BaseModel):
     tpm: int
     rpm: int
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class RouterConfig(BaseModel):
@@ -43,7 +43,7 @@ class RouterConfig(BaseModel):
         "latency-based-routing",
     ] = "simple-shuffle"
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class UpdateRouterConfig(BaseModel):
@@ -63,7 +63,7 @@ class UpdateRouterConfig(BaseModel):
     fallbacks: Optional[List[dict]] = None
     context_window_fallbacks: Optional[List[dict]] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class ModelInfo(BaseModel):
@@ -81,7 +81,7 @@ class ModelInfo(BaseModel):
             id = str(id)
         super().__init__(id=id, **params)
 
-    model_config: ConfigDict = ConfigDict(extra="allow")
+    model_config = ConfigDict(extra="allow")
 
     def __contains__(self, key):
         # Define custom behavior for the 'in' operator
@@ -136,7 +136,7 @@ class GenericLiteLLMParams(BaseModel):
     input_cost_per_second: Optional[float] = None
     output_cost_per_second: Optional[float] = None
 
-    model_config: ConfigDict = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
 
     def __init__(
         self,
@@ -201,7 +201,7 @@ class LiteLLM_Params(GenericLiteLLMParams):
     """
 
     model: str
-    model_config: ConfigDict = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
 
     def __init__(
         self,
@@ -265,7 +265,7 @@ class updateDeployment(BaseModel):
     litellm_params: Optional[updateLiteLLMParams] = None
     model_info: Optional[ModelInfo] = None
 
-    model_config: ConfigDict = ConfigDict(protected_namespaces=())
+    model_config = ConfigDict(protected_namespaces=())
 
 
 class LiteLLMParamsTypedDict(TypedDict, total=False):
@@ -314,7 +314,7 @@ class Deployment(BaseModel):
     litellm_params: LiteLLM_Params
     model_info: ModelInfo
 
-    model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=())
+    model_config = ConfigDict(extra="allow", protected_namespaces=())
 
     def __init__(
         self,

@@ -332,7 +332,7 @@ class HiddenParams(OpenAIObject):
     model_id: Optional[str] = None  # used in Router for individual deployments
     api_base: Optional[str] = None  # returns api base used for making completion call
 
-    model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=())
+    model_config = ConfigDict(extra="allow", protected_namespaces=())
 
     def get(self, key, default=None):
         # Custom .get() method to access attributes with a default value if the attribute doesn't exist