From 4e81b1e650bc115e95119637a7c1fb4b837d8e8b Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Tue, 1 Apr 2025 09:41:35 -0400
Subject: [PATCH] use ollama list to find models

---
 llama_stack/providers/remote/inference/ollama/ollama.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/llama_stack/providers/remote/inference/ollama/ollama.py b/llama_stack/providers/remote/inference/ollama/ollama.py
index 36941480c..cc8053d73 100644
--- a/llama_stack/providers/remote/inference/ollama/ollama.py
+++ b/llama_stack/providers/remote/inference/ollama/ollama.py
@@ -293,9 +293,7 @@ class OllamaInferenceAdapter(Inference, ModelsProtocolPrivate):
         if model.model_type == ModelType.embedding:
             logger.info(f"Pulling embedding model `{model.provider_resource_id}` if necessary...")
             await self.client.pull(model.provider_resource_id)
-            response = await self.client.list()
-        else:
-            response = await self.client.ps()
+        response = await self.client.list()
         available_models = [m["model"] for m in response["models"]]
         if model.provider_resource_id not in available_models:
             raise ValueError(