fix(proxy_server.py): fix model alias map + add back testing

Krrish Dholakia 2024-03-07 07:56:51 -08:00
parent b9854a99d2
commit dd78a1956a
2 changed files with 71 additions and 6 deletions

@@ -2541,6 +2541,12 @@ async def completion(
        if user_api_base:
            data["api_base"] = user_api_base
        ### MODEL ALIAS MAPPING ###
        # check if model name in model alias map
        # get the actual model name
        if data["model"] in litellm.model_alias_map:
            data["model"] = litellm.model_alias_map[data["model"]]
        ### CALL HOOKS ### - modify incoming data before calling the model
        data = await proxy_logging_obj.pre_call_hook(
            user_api_key_dict=user_api_key_dict, data=data, call_type="completion"
@@ -2740,6 +2746,12 @@ async def chat_completion(
        if user_api_base:
            data["api_base"] = user_api_base
        ### MODEL ALIAS MAPPING ###
        # check if model name in model alias map
        # get the actual model name
        if data["model"] in litellm.model_alias_map:
            data["model"] = litellm.model_alias_map[data["model"]]
        ### CALL HOOKS ### - modify incoming data before calling the model
        data = await proxy_logging_obj.pre_call_hook(
            user_api_key_dict=user_api_key_dict, data=data, call_type="completion"
@@ -2948,6 +2960,12 @@ async def embeddings(
            **data,
        } # add the team-specific configs to the completion call
        ### MODEL ALIAS MAPPING ###
        # check if model name in model alias map
        # get the actual model name
        if data["model"] in litellm.model_alias_map:
            data["model"] = litellm.model_alias_map[data["model"]]
        router_model_names = (
            [m["model_name"] for m in llm_model_list]
            if llm_model_list is not None
@@ -3119,6 +3137,12 @@ async def image_generation(
            **data,
        } # add the team-specific configs to the completion call
        ### MODEL ALIAS MAPPING ###
        # check if model name in model alias map
        # get the actual model name
        if data["model"] in litellm.model_alias_map:
            data["model"] = litellm.model_alias_map[data["model"]]
        router_model_names = (
            [m["model_name"] for m in llm_model_list]
            if llm_model_list is not None
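
The same alias-resolution block is added to the completion, chat_completion, embeddings, and image_generation endpoints. Below is a minimal standalone sketch of how that resolution behaves; it relies only on litellm's module-level model_alias_map dict, which the diff itself references, while the alias entries and the resolve_model_alias helper name are illustrative and not part of the commit.

import litellm

# Illustrative alias map (example values, not taken from the commit).
litellm.model_alias_map = {"my-gpt-alias": "gpt-3.5-turbo"}

def resolve_model_alias(data: dict) -> dict:
    # Mirrors the block added in each hunk above: if the requested model
    # name is an alias, swap in the actual model name before the call
    # hooks and router lookups run.
    if data["model"] in litellm.model_alias_map:
        data["model"] = litellm.model_alias_map[data["model"]]
    return data

print(resolve_model_alias({"model": "my-gpt-alias"}))  # {'model': 'gpt-3.5-turbo'}
print(resolve_model_alias({"model": "gpt-4"}))         # unchanged: {'model': 'gpt-4'}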