mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 02:53:30 +00:00
- llama_stack/exceptions.py: add an UnsupportedModelError class - remote inference ollama.py and utils/inference/model_registry.py: replace ValueError with UnsupportedModelError - utils/inference/litellm_openai_mixin.py: remove the register_model func; it now uses the parent class ModelRegistry's func. Closes #2517
13 lines
545 B
Python
13 lines
545 B
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.


class UnsupportedModelError(ValueError):
    """Raised when a requested model does not appear in the provider's supported-model list.

    Subclasses ``ValueError`` so existing callers that catch ``ValueError``
    keep working unchanged.
    """

    def __init__(self, model_name: str, supported_models_list: list[str]):
        # Render the supported models as a comma-separated string for the message.
        supported = ", ".join(supported_models_list)
        super().__init__(f"'{model_name}' model is not supported. Supported models are: {supported}")