Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
(fix) proxy safe access router model names
parent d920801fb8
commit b10e7b7973
1 changed file with 1 addition and 1 deletion
@@ -454,7 +454,7 @@ def litellm_completion(*args, **kwargs):
     if user_api_base:
         kwargs["api_base"] = user_api_base
     ## ROUTE TO CORRECT ENDPOINT ##
-    router_model_names = [m["model_name"] for m in llm_model_list]
+    router_model_names = [m["model_name"] for m in llm_model_list] if llm_model_list is not None else []
     if llm_router is not None and kwargs["model"] in router_model_names: # model in router model list
         if call_type == "chat_completion":
             response = llm_router.completion(*args, **kwargs)
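For context, a minimal standalone sketch of the failure this guard prevents. The variable names mirror the diff, but the snippet is illustrative and assumes a proxy started with no model list configured (so llm_model_list is None); it is not code from the repository.

# Illustrative sketch: llm_model_list is None when no model list is configured.
llm_model_list = None

# Old line: a list comprehension over None raises TypeError.
try:
    router_model_names = [m["model_name"] for m in llm_model_list]
except TypeError as e:
    print(f"old behavior: {e}")  # 'NoneType' object is not iterable

# New line: fall back to an empty list, so the membership check that
# follows evaluates to False instead of crashing the request.
router_model_names = (
    [m["model_name"] for m in llm_model_list] if llm_model_list is not None else []
)
print(router_model_names)  # -> []

With an empty list, kwargs["model"] in router_model_names is False, so the request falls through to the non-router code path rather than raising.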