From b5f883ab74ffa5e6fd92dc89b00e200c756e1c0c Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 27 May 2024 08:49:51 -0700
Subject: [PATCH] feat - show openai params on model hub ui

---
 litellm/router.py       | 12 ++++++++----
 litellm/types/router.py |  1 +
 litellm/types/utils.py  |  5 +++++
 3 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/litellm/router.py b/litellm/router.py
index 3243e09fa..384c7f338 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -3072,7 +3072,7 @@ class Router:
                     model=litellm_params.model,
                     custom_llm_provider=litellm_params.custom_llm_provider,
                 )
-            except Exception as e:
+            except litellm.exceptions.BadRequestError as e:
                 continue

             if model_group_info is None:
@@ -3124,19 +3124,23 @@ class Router:
                 if (
                     model_info.get("supports_parallel_function_calling", None)
                     is not None
-                    and model_info["supports_parallel_function_calling"] == True  # type: ignore
+                    and model_info["supports_parallel_function_calling"] is True  # type: ignore
                 ):
                     model_group_info.supports_parallel_function_calling = True
                 if (
                     model_info.get("supports_vision", None) is not None
-                    and model_info["supports_vision"] == True  # type: ignore
+                    and model_info["supports_vision"] is True  # type: ignore
                 ):
                     model_group_info.supports_vision = True
                 if (
                     model_info.get("supports_function_calling", None) is not None
-                    and model_info["supports_function_calling"] == True  # type: ignore
+                    and model_info["supports_function_calling"] is True  # type: ignore
                 ):
                     model_group_info.supports_function_calling = True
+                if model_info.get("supported_openai_params", None) is not None:
+                    model_group_info.supported_openai_params = model_info[
+                        "supported_openai_params"
+                    ]

         return model_group_info

diff --git a/litellm/types/router.py b/litellm/types/router.py
index 5e6f2c148..93c65a1cf 100644
--- a/litellm/types/router.py
+++ b/litellm/types/router.py
@@ -426,3 +426,4 @@ class ModelGroupInfo(BaseModel):
     supports_parallel_function_calling: bool = Field(default=False)
     supports_vision: bool = Field(default=False)
     supports_function_calling: bool = Field(default=False)
+    supported_openai_params: List[str] = Field(default=[])
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index 5c730cca8..cc0836132 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -15,6 +15,10 @@ class ProviderField(TypedDict):


 class ModelInfo(TypedDict):
+    """
+    Model info for a given model. This is the information found in litellm.model_prices_and_context_window.json.
+    """
+
     max_tokens: int
     max_input_tokens: int
     max_output_tokens: int
@@ -22,3 +26,4 @@
     output_cost_per_token: float
     litellm_provider: str
     mode: str
+    supported_openai_params: List[str]
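
Reviewer note: a minimal sketch of how the new field surfaces end to end. This assumes the router hunks above live in `Router.get_model_group_info` (the accessor the model hub reads); the model name, config, and printed param list below are illustrative placeholders, not part of the patch.

```python
from litellm import Router

# Hypothetical single-group config; the model name is a placeholder.
router = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",
            "litellm_params": {"model": "gpt-3.5-turbo"},
        }
    ]
)

# With this patch, ModelGroupInfo also carries the OpenAI-compatible
# params for the group, alongside the existing supports_* flags.
info = router.get_model_group_info(model_group="gpt-3.5-turbo")
if info is not None:
    print(info.supported_openai_params)
    # e.g. ["temperature", "max_tokens", "top_p", "stream", ...]
```

Defaulting `supported_openai_params` to `[]` keeps `ModelGroupInfo` backward compatible: payloads that omit the field still validate, and the UI can render an empty list as "no data" rather than erroring.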