From 75e72cf2fc93bf0098f5b9ad26144d421abe6ef5 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Tue, 17 Dec 2024 19:42:38 -0800
Subject: [PATCH] model_type=llm for filtering available models for playground

---
 llama_stack/distribution/ui/page/playground/chat.py | 4 +---
 llama_stack/distribution/ui/page/playground/rag.py  | 4 +---
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/llama_stack/distribution/ui/page/playground/chat.py b/llama_stack/distribution/ui/page/playground/chat.py
index 2fb5b6c45..0b8073756 100644
--- a/llama_stack/distribution/ui/page/playground/chat.py
+++ b/llama_stack/distribution/ui/page/playground/chat.py
@@ -12,9 +12,7 @@ with st.sidebar:
     st.header("Configuration")
     available_models = llama_stack_api.client.models.list()
     available_models = [
-        model.identifier
-        for model in available_models
-        if model.identifier.startswith("meta-llama")
+        model.identifier for model in available_models if model.model_type == "llm"
     ]
     selected_model = st.selectbox(
         "Choose a model",
diff --git a/llama_stack/distribution/ui/page/playground/rag.py b/llama_stack/distribution/ui/page/playground/rag.py
index 6b5a2ef87..196c889ba 100644
--- a/llama_stack/distribution/ui/page/playground/rag.py
+++ b/llama_stack/distribution/ui/page/playground/rag.py
@@ -75,9 +75,7 @@ def rag_chat_page():
 
         available_models = llama_stack_api.client.models.list()
         available_models = [
-            model.identifier
-            for model in available_models
-            if model.identifier.startswith("meta-llama")
+            model.identifier for model in available_models if model.model_type == "llm"
         ]
         selected_model = st.selectbox(
             "Choose a model",