fix(utils.py): check if model info is for model with correct provider

Fixes an issue where incorrect pricing was used for a custom LLM provider.
Krrish Dholakia 2024-06-13 15:54:24 -07:00
parent d210eccb79
commit 345094a49d
8 changed files with 55 additions and 18 deletions
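
In concrete terms: when a config entry's model name carries a provider prefix (e.g. "azure/gpt-4o", illustrative), the fallback lookup in the model info endpoints previously queried the cost map with only the bare model name, so pricing could be taken from a different provider's entry for the same model name. The diff below threads the provider prefix through as custom_llm_provider.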

@@ -11402,7 +11402,7 @@ async def model_info_v2(
     for _model in all_models:
         # provided model_info in config.yaml
         model_info = _model.get("model_info", {})
-        if debug == True:
+        if debug is True:
             _openai_client = "None"
             if llm_router is not None:
                 _openai_client = (
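
The first hunk is a lint-level cleanup. Comparing to True with == is flagged by linters (PEP 8 actually recommends a plain `if debug:`); the change to `is True` makes it an identity test against the bool singleton, which behaves differently for truthy non-bool values. A standalone illustration, not from this codebase:

debug = 1               # truthy, and equal to True, but not the bool singleton
print(debug == True)    # True  -- int 1 compares equal to True
print(debug is True)    # False -- identity check matches only bool True
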
@@ -11427,7 +11427,7 @@ async def model_info_v2(
             litellm_model = litellm_params.get("model", None)
             try:
                 litellm_model_info = litellm.get_model_info(model=litellm_model)
-            except:
+            except Exception:
                 litellm_model_info = {}
         # 3rd pass on the model, try seeing if we can find model but without the "/" in model cost map
         if litellm_model_info == {}:
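
The second hunk narrows a bare except. A bare `except:` also traps BaseException subclasses such as KeyboardInterrupt and SystemExit; `except Exception:` lets those propagate while still treating a failed cost-map lookup as "no info". A minimal sketch of the pattern, assuming litellm is installed (the helper name is hypothetical):

import litellm

def safe_model_info(model: str) -> dict:
    try:
        return litellm.get_model_info(model=model)
    except Exception:   # lookup failures only; Ctrl-C still propagates
        return {}
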
@@ -11438,8 +11438,10 @@ async def model_info_v2(
             if len(split_model) > 0:
                 litellm_model = split_model[-1]
             try:
-                litellm_model_info = litellm.get_model_info(model=litellm_model)
-            except:
+                litellm_model_info = litellm.get_model_info(
+                    model=litellm_model, custom_llm_provider=split_model[0]
+                )
+            except Exception:
                 litellm_model_info = {}
         for k, v in litellm_model_info.items():
             if k not in model_info:
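
This hunk is the substantive fix. By this point the model name has been split on "/" and litellm_model rebound to split_model[-1], so the lookup used only the bare model name; if two providers expose the same name, the cost map could return the other provider's pricing. Passing split_model[0] as custom_llm_provider scopes the lookup. A rough sketch of the before/after behavior, assuming litellm is installed (the model name is illustrative):

import litellm

model = "azure/gpt-4o"          # as it might appear in config.yaml
split_model = model.split("/")  # ["azure", "gpt-4o"]

# Before: unscoped lookup by bare name -- may match another provider's entry.
unscoped = litellm.get_model_info(model=split_model[-1])

# After: lookup scoped to the provider prefix.
scoped = litellm.get_model_info(
    model=split_model[-1], custom_llm_provider=split_model[0]
)
print(unscoped.get("input_cost_per_token"), scoped.get("input_cost_per_token"))
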
@@ -11950,7 +11952,9 @@ async def model_info_v1(
         if len(split_model) > 0:
             litellm_model = split_model[-1]
         try:
-            litellm_model_info = litellm.get_model_info(model=litellm_model)
+            litellm_model_info = litellm.get_model_info(
+                model=litellm_model, custom_llm_provider=split_model[0]
+            )
         except:
             litellm_model_info = {}
     for k, v in litellm_model_info.items():
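
The same provider-scoped lookup is mirrored here in model_info_v1 (note that this path's bare `except:` is left unchanged by this commit). The shared fallback logic in both endpoints amounts to roughly the following, a simplified sketch rather than the proxy code itself:

import litellm

def lookup_model_info(litellm_model: str) -> dict:
    # First try the name exactly as configured, e.g. "azure/gpt-4o".
    try:
        return litellm.get_model_info(model=litellm_model)
    except Exception:
        pass
    # Fall back to the bare name, keeping the prefix as the provider
    # so the matching provider's pricing entry is used.
    split_model = litellm_model.split("/")
    try:
        return litellm.get_model_info(
            model=split_model[-1], custom_llm_provider=split_model[0]
        )
    except Exception:
        return {}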