forked from phoenix-oss/llama-stack-mirror
model selection playground fix
This commit is contained in:
parent 3700022d6f
commit af8f1b3531

2 changed files with 10 additions and 4 deletions
@@ -11,7 +11,11 @@ from modules.api import llama_stack_api

 with st.sidebar:
     st.header("Configuration")
     available_models = llama_stack_api.client.models.list()
-    available_models = [model.identifier for model in available_models]
+    available_models = [
+        model.identifier
+        for model in available_models
+        if model.identifier.startswith("meta-llama")
+    ]
     selected_model = st.selectbox(
         "Choose a model",
         available_models,
@@ -74,7 +74,11 @@ def rag_chat_page():
     ]

     available_models = llama_stack_api.client.models.list()
-    available_models = [model.identifier for model in available_models]
+    available_models = [
+        model.identifier
+        for model in available_models
+        if model.identifier.startswith("meta-llama")
+    ]
     selected_model = st.selectbox(
         "Choose a model",
         available_models,
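Taken together, the first two hunks replace the unfiltered identifier list with a comprehension that keeps only "meta-llama" models in the selector. A minimal runnable sketch of the effect, using hypothetical stand-ins (FakeModel, list_models) rather than the real llama_stack_api client:

    # Sketch of the filtering change above; FakeModel/list_models are
    # hypothetical stand-ins, not the real llama_stack_api client.
    from dataclasses import dataclass

    @dataclass
    class FakeModel:
        identifier: str

    def list_models():
        # Stand-in for llama_stack_api.client.models.list(); the registry may
        # also contain entries (e.g. embedding models) that should not be
        # offered as chat models in the playground.
        return [
            FakeModel("meta-llama/Llama-3.1-8B-Instruct"),
            FakeModel("all-MiniLM-L6-v2"),  # example non-chat entry
        ]

    available_models = list_models()
    # The fix: keep only identifiers with the "meta-llama" prefix.
    available_models = [
        model.identifier
        for model in available_models
        if model.identifier.startswith("meta-llama")
    ]
    print(available_models)  # ['meta-llama/Llama-3.1-8B-Instruct']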
@@ -116,8 +120,6 @@ def rag_chat_page():
         with st.chat_message(message["role"]):
             st.markdown(message["content"])

-    selected_model = llama_stack_api.client.models.list()[0].identifier
-
     agent_config = AgentConfig(
         model=selected_model,
         instructions=system_prompt,
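The last hunk deletes a leftover override that rebuilt selected_model from models.list()[0].identifier just before constructing the AgentConfig, silently discarding the sidebar choice. A small sketch of the before/after behavior, with hypothetical names (build_agent_model, registry) standing in for the page code:

    # Hypothetical sketch of the override the last hunk removes; not the
    # real page code.
    def build_agent_model(selected_model: str, registry: list[str], fixed: bool) -> str:
        if not fixed:
            # Old behavior: the sidebar choice was overwritten with the
            # first registry entry right before AgentConfig was built.
            selected_model = registry[0]
        return selected_model

    registry = [
        "meta-llama/Llama-3.1-8B-Instruct",
        "meta-llama/Llama-3.1-70B-Instruct",
    ]
    user_choice = registry[1]
    print(build_agent_model(user_choice, registry, fixed=False))  # first model (bug)
    print(build_agent_model(user_choice, registry, fixed=True))   # user's choice (fixed)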