From 904def61190eb08802cbe92052498cbdf35e596d Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 21 Nov 2023 20:15:43 -0800
Subject: [PATCH] fix(proxy_server.py): fix /models endpoint

---
 litellm/proxy/proxy_server.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 8cf1dd5e5..c43856d26 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -474,7 +474,8 @@ def model_list():
     if server_settings.get("infer_model_from_keys", False):
         all_models = litellm.utils.get_valid_models()
     if llm_model_list:
-        all_models += llm_model_list
+        print(f"llm model list: {llm_model_list}")
+        all_models += [m["model_name"] for m in llm_model_list]
     if user_model is not None:
         all_models += user_model
     ### CHECK OLLAMA MODELS ###
@@ -484,7 +485,7 @@ def model_list():
         ollama_models = [m["name"].replace(":latest", "") for m in models]
         all_models.extend(ollama_models)
     except Exception as e:
-        traceback.print_exc()
+        pass
     return dict(
         data=[
             {
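
Note (not part of the patch): a minimal sketch of why the new list comprehension is needed, assuming llm_model_list holds config-style entries with a "model_name" key and a "litellm_params" dict; the entry values below are illustrative, not taken from this diff.

    # Each proxy model entry is assumed to be a dict; appending the raw dicts
    # to all_models would leak config details into the /models response.
    llm_model_list = [
        {"model_name": "gpt-3.5-turbo", "litellm_params": {"model": "azure/chatgpt-v-2"}},
    ]

    all_models = []
    all_models += [m["model_name"] for m in llm_model_list]
    print(all_models)  # ['gpt-3.5-turbo'] -- plain model-name strings for the /models payload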