forked from phoenix/litellm-mirror
feat - show openai params on model hub ui
This commit is contained in:
parent 3be6a07e35 · commit b5f883ab74

3 changed files with 14 additions and 4 deletions
@@ -3072,7 +3072,7 @@ class Router:
                         model=litellm_params.model,
                         custom_llm_provider=litellm_params.custom_llm_provider,
                     )
-                except Exception as e:
+                except litellm.exceptions.BadRequestError as e:
                     continue

             if model_group_info is None:
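The narrowed except clause above means the model-probing loop only skips entries the provider rejects; any other error now propagates instead of being silently swallowed. A minimal sketch of the pattern, using a stand-in exception class rather than LiteLLM's real litellm.exceptions.BadRequestError:

class BadRequestError(Exception):
    """Stand-in for litellm.exceptions.BadRequestError."""

def probe(model: str) -> str:
    # Hypothetical helper for illustration: rejects one model name.
    if model == "rejected-model":
        raise BadRequestError(model)
    return model

found = []
for m in ["gpt-4", "rejected-model", "claude-3"]:
    try:
        found.append(probe(m))
    except BadRequestError:
        continue  # expected failure: skip this model, keep iterating
    # A TypeError or KeyError here would now propagate rather than be hidden.

print(found)  # ['gpt-4', 'claude-3']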
@@ -3124,19 +3124,23 @@ class Router:
             if (
                 model_info.get("supports_parallel_function_calling", None)
                 is not None
-                and model_info["supports_parallel_function_calling"] == True  # type: ignore
+                and model_info["supports_parallel_function_calling"] is True  # type: ignore
             ):
                 model_group_info.supports_parallel_function_calling = True
             if (
                 model_info.get("supports_vision", None) is not None
-                and model_info["supports_vision"] == True  # type: ignore
+                and model_info["supports_vision"] is True  # type: ignore
             ):
                 model_group_info.supports_vision = True
             if (
                 model_info.get("supports_function_calling", None) is not None
-                and model_info["supports_function_calling"] == True  # type: ignore
+                and model_info["supports_function_calling"] is True  # type: ignore
             ):
                 model_group_info.supports_function_calling = True
+            if model_info.get("supported_openai_params", None) is not None:
+                model_group_info.supported_openai_params = model_info[
+                    "supported_openai_params"
+                ]

         return model_group_info

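A note on the three == True → is True swaps in this hunk: == coerces across types in Python, while is checks identity, so only the bool singleton True passes the new checks. A short illustration in plain Python:

flag = 1  # truthy, e.g. read from JSON or a loosely typed config
print(flag == True)  # True  -- ints compare equal to bools
print(flag is True)  # False -- identity only matches the bool True

So model_info["supports_vision"] is True passes only for a literal bool, which is stricter than the previous check.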
@@ -426,3 +426,4 @@ class ModelGroupInfo(BaseModel):
     supports_parallel_function_calling: bool = Field(default=False)
     supports_vision: bool = Field(default=False)
     supports_function_calling: bool = Field(default=False)
+    supported_openai_params: List[str] = Field(default=[])
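For context, a self-contained sketch of the updated model, redeclaring only the fields visible in this hunk (the real ModelGroupInfo defines more):

from typing import List

from pydantic import BaseModel, Field

class ModelGroupInfo(BaseModel):
    supports_parallel_function_calling: bool = Field(default=False)
    supports_vision: bool = Field(default=False)
    supports_function_calling: bool = Field(default=False)
    supported_openai_params: List[str] = Field(default=[])

info = ModelGroupInfo(supported_openai_params=["temperature", "top_p"])
print(info.supported_openai_params)  # ['temperature', 'top_p']

Note that Pydantic copies field defaults per instance, so Field(default=[]) does not share one mutable list across ModelGroupInfo objects the way a plain class attribute would.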
@@ -15,6 +15,10 @@ class ProviderField(TypedDict):


 class ModelInfo(TypedDict):
+    """
+    Model info for a given model, this is information found in litellm.model_prices_and_context_window.json
+    """
+
     max_tokens: int
     max_input_tokens: int
     max_output_tokens: int
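The new docstring points at litellm.model_prices_and_context_window.json, the static map this TypedDict describes. A hedged usage sketch, assuming the litellm.get_model_info() helper in recent litellm versions reads from that map:

import litellm

# Returned keys depend on the installed litellm version; max_tokens,
# litellm_provider, and mode are among the fields declared in this diff.
info = litellm.get_model_info("gpt-3.5-turbo")
print(info["max_tokens"], info["litellm_provider"], info["mode"])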
@@ -22,3 +26,4 @@ class ModelInfo(TypedDict):
     output_cost_per_token: float
     litellm_provider: str
     mode: str
+    supported_openai_params: List[str]
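Putting the extended TypedDict together, a sketch that redeclares only the keys visible in this diff (the real ModelInfo also declares fields elided between the two hunks) and fills it with made-up values:

from typing import List, TypedDict

class ModelInfoSketch(TypedDict):
    max_tokens: int
    max_input_tokens: int
    max_output_tokens: int
    output_cost_per_token: float
    litellm_provider: str
    mode: str
    supported_openai_params: List[str]

entry: ModelInfoSketch = {
    "max_tokens": 4096,                # placeholder values throughout
    "max_input_tokens": 4096,
    "max_output_tokens": 4096,
    "output_cost_per_token": 0.0,
    "litellm_provider": "openai",
    "mode": "chat",
    "supported_openai_params": ["temperature", "max_tokens", "tools"],
}
print(entry["supported_openai_params"])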