diff --git a/litellm/router.py b/litellm/router.py
index 15fdbd4b8..7acf75e8e 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -2590,6 +2590,16 @@ class Router:
                 return model
         return None

+    def get_model_info(self, id: str) -> Optional[dict]:
+        """
+        For a given model id, return the model info
+        """
+        for model in self.model_list:
+            if "model_info" in model and "id" in model["model_info"]:
+                if id == model["model_info"]["id"]:
+                    return model
+        return None
+
     def get_model_ids(self):
         ids = []
         for model in self.model_list:
@@ -2904,15 +2914,10 @@ class Router:
                 m for m in self.model_list if m["litellm_params"]["model"] == model
             ]
-            verbose_router_logger.debug(
-                f"initial list of deployments: {healthy_deployments}"
-            )
+            litellm.print_verbose(f"initial list of deployments: {healthy_deployments}")
-            verbose_router_logger.debug(
-                f"healthy deployments: length {len(healthy_deployments)} {healthy_deployments}"
-            )
             if len(healthy_deployments) == 0:
-                raise ValueError(f"No healthy deployment available, passed model={model}")
+                raise ValueError(f"No healthy deployment available, passed model={model}. ")
             if litellm.model_alias_map and model in litellm.model_alias_map:
                 model = litellm.model_alias_map[
                     model
diff --git a/litellm/router_strategy/lowest_tpm_rpm_v2.py b/litellm/router_strategy/lowest_tpm_rpm_v2.py
index 4bcf1eec1..f7a55d970 100644
--- a/litellm/router_strategy/lowest_tpm_rpm_v2.py
+++ b/litellm/router_strategy/lowest_tpm_rpm_v2.py
@@ -79,10 +79,12 @@ class LowestTPMLoggingHandler_v2(CustomLogger):
                     model=deployment.get("litellm_params", {}).get("model"),
                     response=httpx.Response(
                         status_code=429,
-                        content="{} rpm limit={}. current usage={}".format(
+                        content="{} rpm limit={}. current usage={}. id={}, model_group={}. Get the model info by calling 'router.get_model_info(id)".format(
                             RouterErrors.user_defined_ratelimit_error.value,
                             deployment_rpm,
                             local_result,
+                            model_id,
+                            deployment.get("model_name", ""),
                         ),
                         request=httpx.Request(method="tpm_rpm_limits", url="https://github.com/BerriAI/litellm"),  # type: ignore
                     ),