diff --git a/litellm/router.py b/litellm/router.py
index 0187b441f..bfd1dafe9 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -3610,7 +3610,7 @@ class Router:
             # get model info
             try:
                 model_info = litellm.get_model_info(model=litellm_params.model)
-            except Exception as e:
+            except Exception:
                 model_info = None
             # get llm provider
             try:
@@ -3619,7 +3619,9 @@ class Router:
                     custom_llm_provider=litellm_params.custom_llm_provider,
                 )
             except litellm.exceptions.BadRequestError as e:
-                continue
+                verbose_router_logger.error(
+                    "litellm.router.py::get_model_group_info() - {}".format(str(e))
+                )
 
             if model_info is None:
                 supported_openai_params = litellm.get_supported_openai_params(
diff --git a/litellm/tests/test_router.py b/litellm/tests/test_router.py
index 02bf9a16b..9b52c7d57 100644
--- a/litellm/tests/test_router.py
+++ b/litellm/tests/test_router.py
@@ -1275,6 +1275,21 @@ def test_openai_completion_on_router():
 # test_openai_completion_on_router()
 
 
+def test_model_group_info():
+    router = Router(
+        model_list=[
+            {
+                "model_name": "command-r-plus",
+                "litellm_params": {"model": "cohere.command-r-plus-v1:0"},
+            }
+        ]
+    )
+
+    response = router.get_model_group_info(model_group="command-r-plus")
+
+    assert response is not None
+
+
 def test_consistent_model_id():
     """
     - For a given model group + litellm params, assert the model id is always the same
diff --git a/litellm/types/router.py b/litellm/types/router.py
index 3dbc5e75d..aa63e95f5 100644
--- a/litellm/types/router.py
+++ b/litellm/types/router.py
@@ -435,13 +435,15 @@ class ModelGroupInfo(BaseModel):
     max_output_tokens: Optional[float] = None
     input_cost_per_token: Optional[float] = None
     output_cost_per_token: Optional[float] = None
-    mode: Literal[
-        "chat", "embedding", "completion", "image_generation", "audio_transcription"
-    ]
+    mode: Optional[
+        Literal[
+            "chat", "embedding", "completion", "image_generation", "audio_transcription"
+        ]
+    ] = Field(default="chat")
     supports_parallel_function_calling: bool = Field(default=False)
     supports_vision: bool = Field(default=False)
     supports_function_calling: bool = Field(default=False)
-    supported_openai_params: List[str] = Field(default=[])
+    supported_openai_params: Optional[List[str]] = Field(default=[])
 
 
 class AssistantsTypedDict(TypedDict):