From 497f4bb38d83c06488e19bd1465e66c398e2ec29 Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:33:21 +0800 Subject: [PATCH 01/10] Replace root_validator with model_validator --- litellm/proxy/_types.py | 38 +++++++++++++++++++++++++------------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index d6bf49dca..42b9b3618 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, Field, root_validator, Json, validator +from pydantic import BaseModel, Extra, Field, model_validator, Json, validator from dataclasses import fields import enum from typing import Optional, List, Union, Dict, Literal, Any @@ -240,7 +240,8 @@ class LiteLLMPromptInjectionParams(LiteLLMBase): llm_api_system_prompt: Optional[str] = None llm_api_fail_call_string: Optional[str] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def check_llm_api_params(cls, values): llm_api_check = values.get("llm_api_check") if llm_api_check is True: @@ -330,7 +331,8 @@ class ModelInfo(LiteLLMBase): extra = Extra.allow # Allow extra fields protected_namespaces = () - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): if values.get("id") is None: values.update({"id": str(uuid.uuid4())}) @@ -359,7 +361,8 @@ class ModelParams(LiteLLMBase): class Config: protected_namespaces = () - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): if values.get("model_info") is None: values.update({"model_info": ModelInfo()}) @@ -406,7 +409,8 @@ class GenerateKeyResponse(GenerateKeyRequest): user_id: Optional[str] = None token_id: Optional[str] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): if values.get("token") is not None: values.update({"key": values.get("token")}) @@ -475,7 
+479,8 @@ class UpdateUserRequest(GenerateRequestBase): user_role: Optional[str] = None max_budget: Optional[float] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def check_user_info(cls, values): if values.get("user_id") is None and values.get("user_email") is None: raise ValueError("Either user id or user email must be provided") @@ -495,7 +500,8 @@ class NewEndUserRequest(LiteLLMBase): None # if no equivalent model in allowed region - default all requests to this model ) - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def check_user_info(cls, values): if values.get("max_budget") is not None and values.get("budget_id") is not None: raise ValueError("Set either 'max_budget' or 'budget_id', not both.") @@ -508,7 +514,8 @@ class Member(LiteLLMBase): user_id: Optional[str] = None user_email: Optional[str] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def check_user_info(cls, values): if values.get("user_id") is None and values.get("user_email") is None: raise ValueError("Either user id or user email must be provided") @@ -553,7 +560,8 @@ class TeamMemberDeleteRequest(LiteLLMBase): user_id: Optional[str] = None user_email: Optional[str] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def check_user_info(cls, values): if values.get("user_id") is None and values.get("user_email") is None: raise ValueError("Either user id or user email must be provided") @@ -590,7 +598,8 @@ class LiteLLM_TeamTable(TeamBase): class Config: protected_namespaces = () - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): dict_fields = [ "metadata", @@ -908,7 +917,8 @@ class UserAPIKeyAuth( user_role: Optional[Literal["proxy_admin", "app_owner", "app_user"]] = None allowed_model_region: Optional[Literal["eu"]] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def 
check_api_key(cls, values): if values.get("api_key") is not None: values.update({"token": hash_token(values.get("api_key"))}) @@ -935,7 +945,8 @@ class LiteLLM_UserTable(LiteLLMBase): tpm_limit: Optional[int] = None rpm_limit: Optional[int] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): if values.get("spend") is None: values.update({"spend": 0.0}) @@ -956,7 +967,8 @@ class LiteLLM_EndUserTable(LiteLLMBase): default_model: Optional[str] = None litellm_budget_table: Optional[LiteLLM_BudgetTable] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def set_model_info(cls, values): if values.get("spend") is None: values.update({"spend": 0.0}) From 6a60bfbd972ff52951af8b8d44414894f53ee69c Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:39:37 +0800 Subject: [PATCH 02/10] Update model config in utils.py --- litellm/utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/litellm/utils.py b/litellm/utils.py index 36f4ad481..cac3ac865 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -19,7 +19,7 @@ from functools import wraps, lru_cache import datetime, time import tiktoken import uuid -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict import aiohttp import textwrap import logging @@ -332,9 +332,7 @@ class HiddenParams(OpenAIObject): model_id: Optional[str] = None # used in Router for individual deployments api_base: Optional[str] = None # returns api base used for making completion call - class Config: - extra = "allow" - protected_namespaces = () + model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=()) def get(self, key, default=None): # Custom .get() method to access attributes with a default value if the attribute doesn't exist From 665b224226333e2c9ccffb0b1866f2931de03367 Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:42:41 +0800 Subject: [PATCH 03/10] Update model 
config in _types.py --- litellm/proxy/_types.py | 45 ++++++++++++++--------------------------- 1 file changed, 15 insertions(+), 30 deletions(-) diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index 42b9b3618..84b0a5833 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, Field, model_validator, Json, validator +from pydantic import BaseModel, Extra, Field, model_validator, Json, ConfigDict from dataclasses import fields import enum from typing import Optional, List, Union, Dict, Literal, Any @@ -35,8 +35,7 @@ class LiteLLMBase(BaseModel): # if using pydantic v1 return self.__fields_set__ - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLM_UpperboundKeyGenerateParams(LiteLLMBase): @@ -299,8 +298,7 @@ class ProxyChatCompletionRequest(LiteLLMBase): deployment_id: Optional[str] = None request_timeout: Optional[int] = None - class Config: - extra = "allow" # allow params not defined here, these fall in litellm.completion(**kwargs) + model_config: ConfigDict = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) class ModelInfoDelete(LiteLLMBase): @@ -327,9 +325,7 @@ class ModelInfo(LiteLLMBase): ] ] - class Config: - extra = Extra.allow # Allow extra fields - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow") @model_validator(mode="before") @classmethod @@ -358,8 +354,7 @@ class ModelParams(LiteLLMBase): litellm_params: dict model_info: ModelInfo - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) @model_validator(mode="before") @classmethod @@ -398,8 +393,7 @@ class GenerateKeyRequest(GenerateRequestBase): {} ) # {"gpt-4": 5.0, "gpt-3.5-turbo": 5.0}, defaults to {} - class Config: - protected_namespaces = () + model_config: ConfigDict = 
ConfigDict(protected_namespaces=()) class GenerateKeyResponse(GenerateKeyRequest): @@ -450,8 +444,7 @@ class LiteLLM_ModelTable(LiteLLMBase): created_by: str updated_by: str - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class NewUserRequest(GenerateKeyRequest): @@ -540,8 +533,7 @@ class TeamBase(LiteLLMBase): class NewTeamRequest(TeamBase): model_aliases: Optional[dict] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class GlobalEndUsersSpend(LiteLLMBase): @@ -595,8 +587,7 @@ class LiteLLM_TeamTable(TeamBase): budget_reset_at: Optional[datetime] = None model_id: Optional[int] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) @model_validator(mode="before") @classmethod @@ -635,8 +626,7 @@ class LiteLLM_BudgetTable(LiteLLMBase): model_max_budget: Optional[dict] = None budget_duration: Optional[str] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class NewOrganizationRequest(LiteLLM_BudgetTable): @@ -686,8 +676,7 @@ class KeyManagementSettings(LiteLLMBase): class TeamDefaultSettings(LiteLLMBase): team_id: str - class Config: - extra = "allow" # allow params not defined here, these fall in litellm.completion(**kwargs) + model_config: ConfigDict = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) class DynamoDBArgs(LiteLLMBase): @@ -851,8 +840,7 @@ class ConfigYAML(LiteLLMBase): description="litellm router object settings. 
See router.py __init__ for all, example router.num_retries=5, router.timeout=5, router.max_retries=5, router.retry_after=5", ) - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLM_VerificationToken(LiteLLMBase): @@ -886,8 +874,7 @@ class LiteLLM_VerificationToken(LiteLLMBase): user_id_rate_limits: Optional[dict] = None team_id_rate_limits: Optional[dict] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLM_VerificationTokenView(LiteLLM_VerificationToken): @@ -954,8 +941,7 @@ class LiteLLM_UserTable(LiteLLMBase): values.update({"models": []}) return values - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLM_EndUserTable(LiteLLMBase): @@ -974,8 +960,7 @@ class LiteLLM_EndUserTable(LiteLLMBase): values.update({"spend": 0.0}) return values - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLM_SpendLogs(LiteLLMBase): From 603705661a6590d100516e8e741c256a2a7b0082 Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:51:36 +0800 Subject: [PATCH 04/10] Update model config in test_config.py --- litellm/tests/test_config.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/litellm/tests/test_config.py b/litellm/tests/test_config.py index e38187e0e..cb7a9f484 100644 --- a/litellm/tests/test_config.py +++ b/litellm/tests/test_config.py @@ -13,7 +13,7 @@ sys.path.insert( 0, os.path.abspath("../..") ) # Adds the parent directory to the, system path import pytest, litellm -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from litellm.proxy.proxy_server import ProxyConfig from litellm.proxy.utils import encrypt_value, ProxyLogging, DualCache from litellm.types.router import Deployment, LiteLLM_Params, ModelInfo @@ -26,8 +26,7 @@ class 
DBModel(BaseModel): model_info: dict litellm_params: dict - class Config: - protected_namespaces = () + model_config = ConfigDict(protected_namespaces=()) @pytest.mark.asyncio From 64f40385203934e0238560d56b3394ebad6b60ff Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:52:21 +0800 Subject: [PATCH 05/10] Update model config in completion.py --- litellm/types/completion.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/litellm/types/completion.py b/litellm/types/completion.py index 78af7667b..36d6cf994 100644 --- a/litellm/types/completion.py +++ b/litellm/types/completion.py @@ -1,6 +1,6 @@ from typing import List, Optional, Union, Iterable -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, validator from typing_extensions import Literal, Required, TypedDict @@ -191,6 +191,4 @@ class CompletionRequest(BaseModel): api_key: Optional[str] = None model_list: Optional[List[str]] = None - class Config: - extra = "allow" - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow") From 5945bb5ed28d0da04b3f7e7f8114cdcaddba481b Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:52:44 +0800 Subject: [PATCH 06/10] Update model config in embedding.py --- litellm/types/embedding.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/litellm/types/embedding.py b/litellm/types/embedding.py index 9db0ef290..c0af79b99 100644 --- a/litellm/types/embedding.py +++ b/litellm/types/embedding.py @@ -1,6 +1,6 @@ from typing import List, Optional, Union -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict class EmbeddingRequest(BaseModel): @@ -18,6 +18,4 @@ class EmbeddingRequest(BaseModel): litellm_logging_obj: Optional[dict] = None logger_fn: Optional[str] = None - class Config: - # allow kwargs - extra = "allow" + model_config: ConfigDict = ConfigDict(extra="allow") From 
7c31eccdc23719503bbda28f7a4f0fc0f5429c6e Mon Sep 17 00:00:00 2001 From: lj Date: Thu, 16 May 2024 16:53:05 +0800 Subject: [PATCH 07/10] Update model config in router.py --- litellm/types/router.py | 32 +++++++++++--------------------- 1 file changed, 11 insertions(+), 21 deletions(-) diff --git a/litellm/types/router.py b/litellm/types/router.py index 68ee387fe..c9c74ac20 100644 --- a/litellm/types/router.py +++ b/litellm/types/router.py @@ -1,6 +1,6 @@ from typing import List, Optional, Union, Dict, Tuple, Literal, TypedDict import httpx -from pydantic import BaseModel, validator, Field +from pydantic import BaseModel, ConfigDict, validator, Field from .completion import CompletionRequest from .embedding import EmbeddingRequest import uuid, enum @@ -12,8 +12,7 @@ class ModelConfig(BaseModel): tpm: int rpm: int - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class RouterConfig(BaseModel): @@ -44,8 +43,7 @@ class RouterConfig(BaseModel): "latency-based-routing", ] = "simple-shuffle" - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class UpdateRouterConfig(BaseModel): @@ -65,8 +63,7 @@ class UpdateRouterConfig(BaseModel): fallbacks: Optional[List[dict]] = None context_window_fallbacks: Optional[List[dict]] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class ModelInfo(BaseModel): @@ -84,8 +81,7 @@ class ModelInfo(BaseModel): id = str(id) super().__init__(id=id, **params) - class Config: - extra = "allow" + model_config: ConfigDict = ConfigDict(extra="allow") def __contains__(self, key): # Define custom behavior for the 'in' operator @@ -139,6 +135,8 @@ class GenericLiteLLMParams(BaseModel): output_cost_per_token: Optional[float] = None input_cost_per_second: Optional[float] = None output_cost_per_second: Optional[float] = None + + model_config: ConfigDict = 
ConfigDict(extra="allow", arbitrary_types_allowed=True) def __init__( self, @@ -180,10 +178,6 @@ class GenericLiteLLMParams(BaseModel): max_retries = int(max_retries) # cast to int super().__init__(max_retries=max_retries, **args, **params) - class Config: - extra = "allow" - arbitrary_types_allowed = True - def __contains__(self, key): # Define custom behavior for the 'in' operator return hasattr(self, key) @@ -207,6 +201,7 @@ class LiteLLM_Params(GenericLiteLLMParams): """ model: str + model_config: ConfigDict = ConfigDict(extra="allow", arbitrary_types_allowed=True) def __init__( self, @@ -241,9 +236,6 @@ class LiteLLM_Params(GenericLiteLLMParams): max_retries = int(max_retries) # cast to int super().__init__(max_retries=max_retries, **args, **params) - class Config: - extra = "allow" - arbitrary_types_allowed = True def __contains__(self, key): # Define custom behavior for the 'in' operator @@ -273,8 +265,7 @@ class updateDeployment(BaseModel): litellm_params: Optional[updateLiteLLMParams] = None model_info: Optional[ModelInfo] = None - class Config: - protected_namespaces = () + model_config: ConfigDict = ConfigDict(protected_namespaces=()) class LiteLLMParamsTypedDict(TypedDict, total=False): @@ -322,6 +313,8 @@ class Deployment(BaseModel): model_name: str litellm_params: LiteLLM_Params model_info: ModelInfo + + model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=()) def __init__( self, @@ -348,9 +341,6 @@ class Deployment(BaseModel): # if using pydantic v1 return self.dict(**kwargs) - class Config: - extra = "allow" - protected_namespaces = () def __contains__(self, key): # Define custom behavior for the 'in' operator From f3d0f003fb4319ae12c79fdb1297144707c5e66b Mon Sep 17 00:00:00 2001 From: lj Date: Fri, 17 May 2024 10:39:00 +0800 Subject: [PATCH 08/10] Removed config dict type definition --- litellm/proxy/_types.py | 28 ++++++++++++++-------------- litellm/types/completion.py | 2 +- litellm/types/embedding.py | 2 +- 
litellm/types/router.py | 16 ++++++++-------- litellm/utils.py | 2 +- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index 84b0a5833..11bd2e77c 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -35,7 +35,7 @@ class LiteLLMBase(BaseModel): # if using pydantic v1 return self.__fields_set__ - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class LiteLLM_UpperboundKeyGenerateParams(LiteLLMBase): @@ -298,7 +298,7 @@ class ProxyChatCompletionRequest(LiteLLMBase): deployment_id: Optional[str] = None request_timeout: Optional[int] = None - model_config: ConfigDict = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) + model_config = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) class ModelInfoDelete(LiteLLMBase): @@ -325,7 +325,7 @@ class ModelInfo(LiteLLMBase): ] ] - model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow") + model_config = ConfigDict(protected_namespaces=(), extra="allow") @model_validator(mode="before") @classmethod @@ -354,7 +354,7 @@ class ModelParams(LiteLLMBase): litellm_params: dict model_info: ModelInfo - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) @model_validator(mode="before") @classmethod @@ -393,7 +393,7 @@ class GenerateKeyRequest(GenerateRequestBase): {} ) # {"gpt-4": 5.0, "gpt-3.5-turbo": 5.0}, defaults to {} - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class GenerateKeyResponse(GenerateKeyRequest): @@ -444,7 +444,7 @@ class LiteLLM_ModelTable(LiteLLMBase): created_by: str updated_by: str - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class 
NewUserRequest(GenerateKeyRequest): @@ -533,7 +533,7 @@ class TeamBase(LiteLLMBase): class NewTeamRequest(TeamBase): model_aliases: Optional[dict] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class GlobalEndUsersSpend(LiteLLMBase): @@ -587,7 +587,7 @@ class LiteLLM_TeamTable(TeamBase): budget_reset_at: Optional[datetime] = None model_id: Optional[int] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) @model_validator(mode="before") @classmethod @@ -626,7 +626,7 @@ class LiteLLM_BudgetTable(LiteLLMBase): model_max_budget: Optional[dict] = None budget_duration: Optional[str] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class NewOrganizationRequest(LiteLLM_BudgetTable): @@ -676,7 +676,7 @@ class KeyManagementSettings(LiteLLMBase): class TeamDefaultSettings(LiteLLMBase): team_id: str - model_config: ConfigDict = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) + model_config = ConfigDict(extra="allow") # allow params not defined here, these fall in litellm.completion(**kwargs) class DynamoDBArgs(LiteLLMBase): @@ -840,7 +840,7 @@ class ConfigYAML(LiteLLMBase): description="litellm router object settings. 
See router.py __init__ for all, example router.num_retries=5, router.timeout=5, router.max_retries=5, router.retry_after=5", ) - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class LiteLLM_VerificationToken(LiteLLMBase): @@ -874,7 +874,7 @@ class LiteLLM_VerificationToken(LiteLLMBase): user_id_rate_limits: Optional[dict] = None team_id_rate_limits: Optional[dict] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class LiteLLM_VerificationTokenView(LiteLLM_VerificationToken): @@ -941,7 +941,7 @@ class LiteLLM_UserTable(LiteLLMBase): values.update({"models": []}) return values - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class LiteLLM_EndUserTable(LiteLLMBase): @@ -960,7 +960,7 @@ class LiteLLM_EndUserTable(LiteLLMBase): values.update({"spend": 0.0}) return values - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class LiteLLM_SpendLogs(LiteLLMBase): diff --git a/litellm/types/completion.py b/litellm/types/completion.py index 36d6cf994..c8ddc7449 100644 --- a/litellm/types/completion.py +++ b/litellm/types/completion.py @@ -191,4 +191,4 @@ class CompletionRequest(BaseModel): api_key: Optional[str] = None model_list: Optional[List[str]] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=(), extra="allow") + model_config = ConfigDict(protected_namespaces=(), extra="allow") diff --git a/litellm/types/embedding.py b/litellm/types/embedding.py index c0af79b99..f8fdebc53 100644 --- a/litellm/types/embedding.py +++ b/litellm/types/embedding.py @@ -18,4 +18,4 @@ class EmbeddingRequest(BaseModel): litellm_logging_obj: Optional[dict] = None logger_fn: Optional[str] = None - model_config: ConfigDict = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow") diff --git 
a/litellm/types/router.py b/litellm/types/router.py index c9c74ac20..189988a61 100644 --- a/litellm/types/router.py +++ b/litellm/types/router.py @@ -12,7 +12,7 @@ class ModelConfig(BaseModel): tpm: int rpm: int - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class RouterConfig(BaseModel): @@ -43,7 +43,7 @@ class RouterConfig(BaseModel): "latency-based-routing", ] = "simple-shuffle" - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class UpdateRouterConfig(BaseModel): @@ -63,7 +63,7 @@ class UpdateRouterConfig(BaseModel): fallbacks: Optional[List[dict]] = None context_window_fallbacks: Optional[List[dict]] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = ConfigDict(protected_namespaces=()) class ModelInfo(BaseModel): @@ -81,7 +81,7 @@ class ModelInfo(BaseModel): id = str(id) super().__init__(id=id, **params) - model_config: ConfigDict = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow") def __contains__(self, key): # Define custom behavior for the 'in' operator @@ -136,7 +136,7 @@ class GenericLiteLLMParams(BaseModel): input_cost_per_second: Optional[float] = None output_cost_per_second: Optional[float] = None - model_config: ConfigDict = ConfigDict(extra="allow", arbitrary_types_allowed=True) + model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True) def __init__( self, @@ -201,7 +201,7 @@ class LiteLLM_Params(GenericLiteLLMParams): """ model: str - model_config: ConfigDict = ConfigDict(extra="allow", arbitrary_types_allowed=True) + model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True) def __init__( self, @@ -265,7 +265,7 @@ class updateDeployment(BaseModel): litellm_params: Optional[updateLiteLLMParams] = None model_info: Optional[ModelInfo] = None - model_config: ConfigDict = ConfigDict(protected_namespaces=()) + model_config = 
ConfigDict(protected_namespaces=()) class LiteLLMParamsTypedDict(TypedDict, total=False): @@ -314,7 +314,7 @@ class Deployment(BaseModel): litellm_params: LiteLLM_Params model_info: ModelInfo - model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=()) + model_config = ConfigDict(extra="allow", protected_namespaces=()) def __init__( self, diff --git a/litellm/utils.py b/litellm/utils.py index cac3ac865..53a2d31ce 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -332,7 +332,7 @@ class HiddenParams(OpenAIObject): model_id: Optional[str] = None # used in Router for individual deployments api_base: Optional[str] = None # returns api base used for making completion call - model_config: ConfigDict = ConfigDict(extra="allow", protected_namespaces=()) + model_config = ConfigDict(extra="allow", protected_namespaces=()) def get(self, key, default=None): # Custom .get() method to access attributes with a default value if the attribute doesn't exist From 1de5aa30af38bc6d89d55602109e3879fea567f0 Mon Sep 17 00:00:00 2001 From: lj Date: Fri, 17 May 2024 10:39:36 +0800 Subject: [PATCH 09/10] Add pydantic plugin to mypy to eliminate incorrect lint errors --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 55e88c30a..e38958ea1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,3 +84,5 @@ version_files = [ "pyproject.toml:^version" ] +[tool.mypy] +plugins = "pydantic.mypy" From 1ff3afc1aef022ffc0d6759cd944063142f70768 Mon Sep 17 00:00:00 2001 From: lj Date: Fri, 31 May 2024 11:37:57 +0800 Subject: [PATCH 10/10] Fix class config deprecation warning --- litellm/proxy/_types.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index 859840619..6df6b4fe4 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -839,8 +839,7 @@ class LiteLLM_TeamMemberTable(LiteLLM_BudgetTable): team_id: Optional[str] = None budget_id: 
Optional[str] = None - class Config: - protected_namespaces = () + model_config = ConfigDict(protected_namespaces=()) class NewOrganizationRequest(LiteLLM_BudgetTable):