Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-12 04:50:39 +00:00
refactor: introduce common 'ResourceNotFoundError' exception (#3032)
# What does this PR do?

1. Introduce new base custom exception class `ResourceNotFoundError`
2. All other "not found" exception classes now inherit from `ResourceNotFoundError`

Closes #3030

Signed-off-by: Nathan Weinberg <nweinber@redhat.com>
This commit is contained in:
parent dfce05d0c5
commit e9fced773a
2 changed files with 33 additions and 16 deletions
Changed file 1 of 2: the common errors module (imported in the test file below as `llama_stack.apis.common.errors`).

@@ -10,6 +10,16 @@
 # 3. All classes should propogate the inherited __init__ function otherwise via 'super().__init__(message)'
 
 
+class ResourceNotFoundError(ValueError):
+    """generic exception for a missing Llama Stack resource"""
+
+    def __init__(self, resource_name: str, resource_type: str, client_list: str) -> None:
+        message = (
+            f"{resource_type} '{resource_name}' not found. Use '{client_list}' to list available {resource_type}s."
+        )
+        super().__init__(message)
+
+
 class UnsupportedModelError(ValueError):
     """raised when model is not present in the list of supported models"""
 
@@ -18,38 +28,32 @@ class UnsupportedModelError(ValueError):
         super().__init__(message)
 
 
-class ModelNotFoundError(ValueError):
+class ModelNotFoundError(ResourceNotFoundError):
     """raised when Llama Stack cannot find a referenced model"""
 
     def __init__(self, model_name: str) -> None:
-        message = f"Model '{model_name}' not found. Use client.models.list() to list available models."
-        super().__init__(message)
+        super().__init__(model_name, "Model", "client.models.list()")
 
 
-class VectorStoreNotFoundError(ValueError):
+class VectorStoreNotFoundError(ResourceNotFoundError):
     """raised when Llama Stack cannot find a referenced vector store"""
 
     def __init__(self, vector_store_name: str) -> None:
-        message = f"Vector store '{vector_store_name}' not found. Use client.vector_dbs.list() to list available vector stores."
-        super().__init__(message)
+        super().__init__(vector_store_name, "Vector Store", "client.vector_dbs.list()")
 
 
-class DatasetNotFoundError(ValueError):
+class DatasetNotFoundError(ResourceNotFoundError):
     """raised when Llama Stack cannot find a referenced dataset"""
 
     def __init__(self, dataset_name: str) -> None:
-        message = f"Dataset '{dataset_name}' not found. Use client.datasets.list() to list available datasets."
-        super().__init__(message)
+        super().__init__(dataset_name, "Dataset", "client.datasets.list()")
 
 
-class ToolGroupNotFoundError(ValueError):
+class ToolGroupNotFoundError(ResourceNotFoundError):
     """raised when Llama Stack cannot find a referenced tool group"""
 
     def __init__(self, toolgroup_name: str) -> None:
-        message = (
-            f"Tool group '{toolgroup_name}' not found. Use client.toolgroups.list() to list available tool groups."
-        )
-        super().__init__(message)
+        super().__init__(toolgroup_name, "Tool Group", "client.toolgroups.list()")
 
 
 class SessionNotFoundError(ValueError):
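To illustrate how the refactored hierarchy is meant to be used, here is a minimal sketch (not part of the diff). It assumes the module path `llama_stack.apis.common.errors` visible in the test import below; the `BenchmarkNotFoundError` class and the `client.benchmarks.list()` hint are hypothetical and only show how a future resource type would plug into the shared base class.

```python
from llama_stack.apis.common.errors import ModelNotFoundError, ResourceNotFoundError


# Hypothetical subclass: a new resource type only supplies its name, label,
# and list-call hint; the message format lives in ResourceNotFoundError.
class BenchmarkNotFoundError(ResourceNotFoundError):
    """raised when Llama Stack cannot find a referenced benchmark (illustrative only)"""

    def __init__(self, benchmark_name: str) -> None:
        super().__init__(benchmark_name, "Benchmark", "client.benchmarks.list()")


# Callers can handle any missing-resource failure through the common base class.
try:
    raise ModelNotFoundError("llama-3-8b")
except ResourceNotFoundError as err:
    # "Model 'llama-3-8b' not found. Use 'client.models.list()' to list available Models."
    print(err)
```

Because `ResourceNotFoundError` itself still subclasses `ValueError`, existing `except ValueError` handlers continue to catch these errors.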
Changed file 2 of 2: the test exercising test_register_and_unregister_toolgroup.

@@ -4,9 +4,12 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
+import re
+
 import pytest
 
 from llama_stack import LlamaStackAsLibraryClient
+from llama_stack.apis.common.errors import ToolGroupNotFoundError
 from tests.common.mcp import MCP_TOOLGROUP_ID, make_mcp_server
 
 
@@ -48,8 +51,18 @@ def test_register_and_unregister_toolgroup(llama_stack_client):
     llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
 
     # Verify it is unregistered
-    with pytest.raises(Exception, match=f"Tool group '{test_toolgroup_id}' not found"):
+    with pytest.raises(
+        ToolGroupNotFoundError,
+        match=re.escape(
+            f"Tool Group '{test_toolgroup_id}' not found. Use 'client.toolgroups.list()' to list available Tool Groups."
+        ),
+    ):
         llama_stack_client.toolgroups.get(toolgroup_id=test_toolgroup_id)
 
-    with pytest.raises(Exception, match=f"Tool group '{test_toolgroup_id}' not found"):
+    with pytest.raises(
+        ToolGroupNotFoundError,
+        match=re.escape(
+            f"Tool Group '{test_toolgroup_id}' not found. Use 'client.toolgroups.list()' to list available Tool Groups."
+        ),
+    ):
         llama_stack_client.tools.list(toolgroup_id=test_toolgroup_id)
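A note on the test change: `pytest.raises(match=...)` treats the pattern as a regular expression, and the new message contains regex metacharacters (the parentheses and dots in 'client.toolgroups.list()'), so the test wraps the expected string in `re.escape` to match it literally. Below is a self-contained sketch of the same pattern; it uses a local stand-in mirroring the base class added in this commit and a made-up toolgroup id, rather than the real Llama Stack client.

```python
import re

import pytest


class ResourceNotFoundError(ValueError):
    """local stand-in mirroring the base class added in this commit"""

    def __init__(self, resource_name: str, resource_type: str, client_list: str) -> None:
        message = (
            f"{resource_type} '{resource_name}' not found. Use '{client_list}' to list available {resource_type}s."
        )
        super().__init__(message)


def test_not_found_message_is_matched_literally():
    expected = "Tool Group 'mcp::demo' not found. Use 'client.toolgroups.list()' to list available Tool Groups."
    # re.escape turns the parentheses and dots into literal characters for the regex match.
    with pytest.raises(ResourceNotFoundError, match=re.escape(expected)):
        raise ResourceNotFoundError("mcp::demo", "Tool Group", "client.toolgroups.list()")
```

Asserting on the concrete `ToolGroupNotFoundError` type (rather than bare `Exception`) also makes the test fail loudly if a different error slips through with a similar message.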