chore: standardize unsupported model error #2517

- llama_stack/exceptions.py: Add UnsupportedModelError class (sketched below)
- remote inference ollama.py and utils/inference/model_registry.py:
  Replaced ValueError with UnsupportedModelError
- utils/inference/litellm_openai_mixin.py: Removed register_model func;
  now uses the parent class ModelRegistryHelper's implementation

Closes #2517
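
The new exception class itself is not shown in the hunks below, but its constructor signature is fixed by the call site `UnsupportedModelError(model.provider_resource_id, available_models)`. A minimal sketch of what llama_stack/exceptions.py could contain, assuming the class subclasses ValueError so existing `except ValueError` handlers keep working (the exact message wording is a guess):

    # Hypothetical sketch of llama_stack/exceptions.py -- the real class may differ.

    class UnsupportedModelError(ValueError):
        """Raised when a requested model is not in a provider's supported list."""

        def __init__(self, model_name: str, supported_models_list: list[str]):
            # One canonical message, instead of each provider formatting its own.
            message = f"'{model_name}' model is not supported. Supported models: {', '.join(supported_models_list)}"
            super().__init__(message)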
Rohan Awhad 2025-06-25 11:10:58 -04:00
parent cfee63bd0d
commit 7ccf83fb74
4 changed files with 17 additions and 13 deletions


@@ -51,6 +51,7 @@ from llama_stack.apis.inference.inference import (
     OpenAIResponseFormatParam,
 )
 from llama_stack.apis.models import Model, ModelType
+from llama_stack.exceptions import UnsupportedModelError
 from llama_stack.log import get_logger
 from llama_stack.providers.datatypes import (
     HealthResponse,
@@ -374,9 +375,7 @@ class OllamaInferenceAdapter(
                     f"Imprecise provider resource id was used but 'latest' is available in Ollama - using '{model.provider_resource_id}:latest'"
                 )
                 return model
-            raise ValueError(
-                f"Model '{model.provider_resource_id}' is not available in Ollama. Available models: {', '.join(available_models)}"
-            )
+            raise UnsupportedModelError(model.provider_resource_id, available_models)
         model.provider_resource_id = provider_resource_id
         return model
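
One payoff of the standardization: callers can catch a single exception type across providers instead of string-matching ValueError messages. A hedged usage sketch (the adapter variable and helper function are illustrative, not part of this change):

    from llama_stack.exceptions import UnsupportedModelError

    async def register_or_report(adapter, model):
        """Try to register a model; report cleanly if the provider doesn't support it."""
        try:
            return await adapter.register_model(model)
        except UnsupportedModelError as err:
            # Same exception type whether the provider is Ollama or any
            # ModelRegistryHelper-based adapter.
            print(f"Could not register '{model.provider_resource_id}': {err}")
            return None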