fix(types/router.py): ModelGroupInfo to handle mode being None and supported_openai_params not being a list
commit 6306914e56 (parent 0d3add56a6)
3 changed files with 25 additions and 6 deletions
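Only the litellm/router.py hunks are rendered on this page; the litellm/types/router.py change the commit title describes is not shown. As a minimal, hypothetical sketch (not the commit's literal code), assuming ModelGroupInfo is a pydantic model and pydantic v2's field_validator is available, the handling the title describes could look like:

```python
# Hypothetical sketch of the types/router.py fix named in the commit title.
# The field set is truncated; the real ModelGroupInfo has more fields, and
# the actual commit may coerce values differently.
from typing import List, Optional

from pydantic import BaseModel, field_validator


class ModelGroupInfo(BaseModel):
    model_group: str
    providers: List[str]
    mode: Optional[str] = "chat"
    supported_openai_params: Optional[List[str]] = None

    @field_validator("mode", mode="before")
    @classmethod
    def _mode_defaults_to_chat(cls, v):
        # a None mode falls back to the default instead of breaking
        # consumers that expect a string
        return v if v is not None else "chat"

    @field_validator("supported_openai_params", mode="before")
    @classmethod
    def _params_must_be_a_list(cls, v):
        # tolerate None or a single bare value where a list is expected
        if v is None:
            return []
        if not isinstance(v, list):
            return [v]
        return v


# e.g. ModelGroupInfo(model_group="gpt-4", providers=["openai"],
#                     mode=None, supported_openai_params="temperature")
# yields mode="chat" and supported_openai_params=["temperature"]
```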
litellm/router.py:

```diff
@@ -3610,7 +3610,7 @@ class Router:
             # get model info
             try:
                 model_info = litellm.get_model_info(model=litellm_params.model)
-            except Exception as e:
+            except Exception:
                 model_info = None
             # get llm provider
             try:
@@ -3619,7 +3619,9 @@ class Router:
                     custom_llm_provider=litellm_params.custom_llm_provider,
                 )
             except litellm.exceptions.BadRequestError as e:
-                continue
+                verbose_router_logger.error(
+                    "litellm.router.py::get_model_group_info() - {}".format(str(e))
+                )
 
             if model_info is None:
                 supported_openai_params = litellm.get_supported_openai_params(
```
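The second hunk changes behavior, not just logging: a BadRequestError from provider detection no longer `continue`s past the deployment, so the error is logged and the model group is still assembled from whatever info is available. Reassembled from the hunk above (the left-hand side of the get_llm_provider call is an assumption, since the diff only shows its closing lines), the patched block plausibly reads:

```python
# inside Router.get_model_group_info(), per deployment (reassembled sketch)
try:
    model, llm_provider, _, _ = litellm.get_llm_provider(
        model=litellm_params.model,
        custom_llm_provider=litellm_params.custom_llm_provider,
    )
except litellm.exceptions.BadRequestError as e:
    # log and fall through instead of `continue`, so the deployment is
    # no longer silently dropped from the model group
    verbose_router_logger.error(
        "litellm.router.py::get_model_group_info() - {}".format(str(e))
    )
```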