From a7c6328d0c50907320a1b3910e43fa960e5228e1 Mon Sep 17 00:00:00 2001 From: Rohan Awhad Date: Fri, 27 Jun 2025 13:59:05 -0400 Subject: [PATCH] chore: moved from general exceptions to apis/common/errors.py --- llama_stack/{exceptions.py => apis/common/errors.py} | 0 llama_stack/providers/remote/inference/ollama/ollama.py | 2 +- llama_stack/providers/utils/inference/litellm_openai_mixin.py | 2 +- llama_stack/providers/utils/inference/model_registry.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename llama_stack/{exceptions.py => apis/common/errors.py} (100%) diff --git a/llama_stack/exceptions.py b/llama_stack/apis/common/errors.py similarity index 100% rename from llama_stack/exceptions.py rename to llama_stack/apis/common/errors.py diff --git a/llama_stack/providers/remote/inference/ollama/ollama.py b/llama_stack/providers/remote/inference/ollama/ollama.py index e7c0d1e05..2d83bf82b 100644 --- a/llama_stack/providers/remote/inference/ollama/ollama.py +++ b/llama_stack/providers/remote/inference/ollama/ollama.py @@ -18,6 +18,7 @@ from llama_stack.apis.common.content_types import ( InterleavedContentItem, TextContentItem, ) +from llama_stack.apis.common.errors import UnsupportedModelError from llama_stack.apis.inference import ( ChatCompletionRequest, ChatCompletionResponse, @@ -48,7 +49,6 @@ from llama_stack.apis.inference import ( ToolPromptFormat, ) from llama_stack.apis.models import Model, ModelType -from llama_stack.exceptions import UnsupportedModelError from llama_stack.log import get_logger from llama_stack.providers.datatypes import ( HealthResponse, diff --git a/llama_stack/providers/utils/inference/litellm_openai_mixin.py b/llama_stack/providers/utils/inference/litellm_openai_mixin.py index e511d1158..188e82125 100644 --- a/llama_stack/providers/utils/inference/litellm_openai_mixin.py +++ b/llama_stack/providers/utils/inference/litellm_openai_mixin.py @@ -13,6 +13,7 @@ from llama_stack.apis.common.content_types import ( 
InterleavedContent, InterleavedContentItem, ) +from llama_stack.apis.common.errors import UnsupportedModelError from llama_stack.apis.inference import ( ChatCompletionRequest, ChatCompletionResponse, @@ -40,7 +41,6 @@ from llama_stack.apis.inference import ( ) from llama_stack.apis.models import Model from llama_stack.distribution.request_headers import NeedsRequestProviderData -from llama_stack.exceptions import UnsupportedModelError from llama_stack.log import get_logger from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper from llama_stack.providers.utils.inference.openai_compat import ( diff --git a/llama_stack/providers/utils/inference/model_registry.py b/llama_stack/providers/utils/inference/model_registry.py index bbaf90779..46c0ca7b5 100644 --- a/llama_stack/providers/utils/inference/model_registry.py +++ b/llama_stack/providers/utils/inference/model_registry.py @@ -8,8 +8,8 @@ from typing import Any from pydantic import BaseModel, Field +from llama_stack.apis.common.errors import UnsupportedModelError from llama_stack.apis.models import ModelType -from llama_stack.exceptions import UnsupportedModelError from llama_stack.models.llama.sku_list import all_registered_models from llama_stack.providers.datatypes import Model, ModelsProtocolPrivate from llama_stack.providers.utils.inference import (