diff --git a/llama_stack/distribution/ui/page/playground/chat.py b/llama_stack/distribution/ui/page/playground/chat.py
index 2fb5b6c45..0b8073756 100644
--- a/llama_stack/distribution/ui/page/playground/chat.py
+++ b/llama_stack/distribution/ui/page/playground/chat.py
@@ -12,9 +12,7 @@ with st.sidebar:
     st.header("Configuration")
     available_models = llama_stack_api.client.models.list()
     available_models = [
-        model.identifier
-        for model in available_models
-        if model.identifier.startswith("meta-llama")
+        model.identifier for model in available_models if model.model_type == "llm"
     ]
     selected_model = st.selectbox(
         "Choose a model",
diff --git a/llama_stack/distribution/ui/page/playground/rag.py b/llama_stack/distribution/ui/page/playground/rag.py
index 6b5a2ef87..196c889ba 100644
--- a/llama_stack/distribution/ui/page/playground/rag.py
+++ b/llama_stack/distribution/ui/page/playground/rag.py
@@ -75,9 +75,7 @@ def rag_chat_page():
 
         available_models = llama_stack_api.client.models.list()
         available_models = [
-            model.identifier
-            for model in available_models
-            if model.identifier.startswith("meta-llama")
+            model.identifier for model in available_models if model.model_type == "llm"
         ]
         selected_model = st.selectbox(
             "Choose a model",
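
Note: both hunks make the same change: the playground model dropdowns now filter on the registered model_type ("llm") instead of the hard-coded "meta-llama" identifier prefix, so LLMs from any provider show up while embedding models stay excluded. A minimal standalone sketch of the new filter follows; the client construction and base URL are assumptions for illustration (the UI pages obtain their client via llama_stack_api instead):

    from llama_stack_client import LlamaStackClient

    # Assumed standalone setup; port 8321 is only an example server address.
    client = LlamaStackClient(base_url="http://localhost:8321")

    # Same filter introduced in chat.py and rag.py: keep only models whose
    # model_type is "llm", regardless of their identifier prefix or provider.
    available_models = [
        model.identifier for model in client.models.list() if model.model_type == "llm"
    ]
    print(available_models)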