Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)

commit 7dd37a5b18 (parent 1d7accce9e)

    fix supports_web_search

4 changed files with 9 additions and 0 deletions
@@ -4928,6 +4928,11 @@ class Router:
                     and model_info["supports_function_calling"] is True  # type: ignore
                 ):
                     model_group_info.supports_function_calling = True
+                if (
+                    model_info.get("supports_web_search", None) is not None
+                    and model_info["supports_web_search"] is True  # type: ignore
+                ):
+                    model_group_info.supports_web_search = True
                 if (
                     model_info.get("supported_openai_params", None) is not None
                     and model_info["supported_openai_params"] is not None
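Below is a minimal sketch (not litellm's Router) of the aggregation pattern this hunk follows: the group-level flag flips to True as soon as any deployment in the model group reports the capability. GroupInfo and the deployment dicts are illustrative stand-ins for litellm's own types.

# Sketch of the group-level capability aggregation; illustrative names only.
from typing import Dict, List, Optional
from pydantic import BaseModel, Field


class GroupInfo(BaseModel):
    supports_web_search: bool = Field(default=False)


def aggregate_web_search(deployments: List[Dict[str, Optional[bool]]]) -> GroupInfo:
    group = GroupInfo()
    for model_info in deployments:
        if (
            model_info.get("supports_web_search", None) is not None
            and model_info["supports_web_search"] is True
        ):
            group.supports_web_search = True
    return group


print(aggregate_web_search([{}, {"supports_web_search": True}]))
# supports_web_search=True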
@@ -559,6 +559,7 @@ class ModelGroupInfo(BaseModel):
     rpm: Optional[int] = None
     supports_parallel_function_calling: bool = Field(default=False)
     supports_vision: bool = Field(default=False)
+    supports_web_search: bool = Field(default=False)
     supports_function_calling: bool = Field(default=False)
     supported_openai_params: Optional[List[str]] = Field(default=[])
     configurable_clientside_auth_params: CONFIGURABLE_CLIENTSIDE_AUTH_PARAMS = None
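A short sketch of why a Field(default=False) keeps this schema change backward compatible: payloads built before the field existed simply read False. Info is an illustrative stand-in for ModelGroupInfo, and the example assumes pydantic v2 (use .dict() instead of .model_dump() on v1).

from pydantic import BaseModel, Field


class Info(BaseModel):
    supports_vision: bool = Field(default=False)
    supports_web_search: bool = Field(default=False)


info = Info(**{"supports_vision": True})  # payload that predates the new field
print(info.supports_web_search)           # False
print(info.model_dump())                  # {'supports_vision': True, 'supports_web_search': False}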
@@ -97,6 +97,7 @@ class ProviderSpecificModelInfo(TypedDict, total=False):
     supports_pdf_input: Optional[bool]
     supports_native_streaming: Optional[bool]
     supports_parallel_function_calling: Optional[bool]
+    supports_web_search: Optional[bool]
 
 
 class ModelInfoBase(ProviderSpecificModelInfo, total=False):
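A quick sketch of what total=False means for the new key: it may be absent from the dict entirely, which is why the call sites in this commit use .get("supports_web_search", ...) instead of indexing. ProviderInfo below is an illustrative stand-in for the litellm TypedDict.

from typing import Optional, TypedDict


class ProviderInfo(TypedDict, total=False):
    supports_parallel_function_calling: Optional[bool]
    supports_web_search: Optional[bool]


info: ProviderInfo = {"supports_parallel_function_calling": True}  # no web-search key
print("supports_web_search" in info)           # False
print(info.get("supports_web_search", False))  # False -> treated as unsupported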
@@ -4544,6 +4544,7 @@ def _get_model_info_helper(  # noqa: PLR0915
             supports_native_streaming=_model_info.get(
                 "supports_native_streaming", None
             ),
+            supports_web_search=_model_info.get("supports_web_search", False),
             tpm=_model_info.get("tpm", None),
             rpm=_model_info.get("rpm", None),
         )
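A plain-dict sketch of the two defaults used in this constructor call: a missing "supports_native_streaming" stays None ("unknown"), while a missing "supports_web_search" collapses to False ("treated as unsupported").

_model_info = {"supports_native_streaming": True}

print(_model_info.get("supports_native_streaming", None))  # True
print(_model_info.get("supports_web_search", False))        # False
print(_model_info.get("supports_web_search", None))         # None (the other convention)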
@@ -4612,6 +4613,7 @@ def get_model_info(model: str, custom_llm_provider: Optional[str] = None) -> Mod
             supports_audio_input: Optional[bool]
             supports_audio_output: Optional[bool]
             supports_pdf_input: Optional[bool]
+            supports_web_search: Optional[bool]
     Raises:
         Exception: If the model is not mapped yet.
 
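A hedged usage sketch of the end-to-end effect: after this change, get_model_info() can surface a supports_web_search key. "gpt-4o" is only an example model; whether the flag is present (and True) depends on the corresponding entry in litellm's model cost map.

import litellm

info = litellm.get_model_info(model="gpt-4o")
print(info.get("supports_web_search", False))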