llama-stack-mirror/llama_stack/apis/common/errors.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.


class UnsupportedModelError(ValueError):
    """Raised when the requested model is not in the list of supported models."""

    def __init__(self, model_name: str, supported_models_list: list[str]):
        message = f"'{model_name}' model is not supported. Supported models are: {', '.join(supported_models_list)}"
        super().__init__(message)


class ModelNotFoundError(ValueError):
    """Raised when Llama Stack cannot find a referenced model."""

    def __init__(self, model_name: str) -> None:
        message = f"Model '{model_name}' not found. Use client.models.list() to list available models."
        super().__init__(message)
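

# --- Hypothetical usage sketch (not part of the upstream module) ---
# The names below (_SUPPORTED_MODELS, _REGISTERED_MODELS, _resolve_model) are
# illustrative assumptions only; they show when each error class above might
# typically be raised. Guarded by __main__ so importing this module is unchanged.
if __name__ == "__main__":
    _SUPPORTED_MODELS = ["llama-3.1-8b-instruct", "llama-3.1-70b-instruct"]
    _REGISTERED_MODELS = {"llama-3.1-8b-instruct": {"provider_id": "inline::meta-reference"}}

    def _resolve_model(model_name: str) -> dict:
        """Validate a requested model name against the illustrative registries above."""
        if model_name not in _SUPPORTED_MODELS:
            # The model is not supported at all.
            raise UnsupportedModelError(model_name, _SUPPORTED_MODELS)
        if model_name not in _REGISTERED_MODELS:
            # The model is supported in principle, but no instance is registered.
            raise ModelNotFoundError(model_name)
        return _REGISTERED_MODELS[model_name]

    try:
        _resolve_model("llama-2-7b")
    except UnsupportedModelError as err:
        print(err)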