From e5377d078daf8e39032ec5047814726b431974be Mon Sep 17 00:00:00 2001
From: skamenan7
Date: Thu, 10 Jul 2025 12:06:14 -0400
Subject: [PATCH] fix: address reviewer feedback - improve conditional imports
 and remove provider alias logic

- Improve conditional import approach with better documentation
- Remove provider-specific alias logic from sku_list.py
- Conditional imports are necessary because llama4 requires torch
- Addresses @ashwinb and @raghotham feedback while maintaining compatibility
---
 llama_stack/models/llama/sku_list.py            | 13 -------------
 .../providers/utils/inference/prompt_adapter.py |  7 +++++--
 2 files changed, 5 insertions(+), 15 deletions(-)

diff --git a/llama_stack/models/llama/sku_list.py b/llama_stack/models/llama/sku_list.py
index 730e9f770..594f6f72a 100644
--- a/llama_stack/models/llama/sku_list.py
+++ b/llama_stack/models/llama/sku_list.py
@@ -23,19 +23,6 @@ def resolve_model(descriptor: str) -> Model | None:
         if descriptor in (m.descriptor(), m.huggingface_repo):
             return m
 
-    # Check provider aliases by attempting to import and check common providers
-    try:
-        from llama_stack.providers.remote.inference.together.models import MODEL_ENTRIES as TOGETHER_ENTRIES
-
-        for entry in TOGETHER_ENTRIES:
-            if descriptor in entry.aliases and entry.llama_model:
-                # Find the model by its descriptor
-                for m in all_registered_models():
-                    if m.descriptor() == entry.llama_model:
-                        return m
-    except ImportError:
-        pass
-
     return None
 
 
diff --git a/llama_stack/providers/utils/inference/prompt_adapter.py b/llama_stack/providers/utils/inference/prompt_adapter.py
index b88be3ab7..53b7a4f3f 100644
--- a/llama_stack/providers/utils/inference/prompt_adapter.py
+++ b/llama_stack/providers/utils/inference/prompt_adapter.py
@@ -52,7 +52,7 @@ from llama_stack.models.llama.llama3.prompt_templates import (
 )
 from llama_stack.models.llama.llama3.tokenizer import Tokenizer
 
-# Conditional imports to avoid heavy dependencies during module loading
+# Import llama4 components - these require torch to be available
 try:
     from llama_models.llama4.chat_format import ChatFormat as Llama4ChatFormat
     from llama_models.llama4.prompt_templates.system_prompts import (
@@ -62,8 +62,11 @@ try:
 
     LLAMA4_AVAILABLE = True
 except ImportError:
-    # Llama4 dependencies not available (e.g., torch not installed)
+    # Llama4 requires torch - if not available, we can't use Llama4 features
     LLAMA4_AVAILABLE = False
+    Llama4ChatFormat = None
+    PythonListCustomToolGeneratorLlama4 = None
+    Llama4Tokenizer = None
 from llama_stack.models.llama.sku_list import resolve_model
 from llama_stack.models.llama.sku_types import ModelFamily, is_multimodal
 from llama_stack.providers.utils.inference import supported_inference_models
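
Note (not part of the patch): a minimal sketch of the guard pattern this change
standardizes on, for reviewers. The try/except mirrors the patched
prompt_adapter.py; the tokenizer import path is inferred from the visible
llama4 imports, the get_llama4_tokenizer() helper and its error message are
hypothetical, and get_instance() is assumed to mirror the llama3 Tokenizer API.

try:
    # Importing llama4 modules pulls in torch, which may not be installed.
    from llama_models.llama4.tokenizer import Tokenizer as Llama4Tokenizer

    LLAMA4_AVAILABLE = True
except ImportError:
    # Bind the name to None so stray uses fail loudly rather than with NameError.
    LLAMA4_AVAILABLE = False
    Llama4Tokenizer = None


def get_llama4_tokenizer():
    """Return a Llama4 tokenizer, or raise a clear error when torch is absent."""
    if not LLAMA4_AVAILABLE:
        raise RuntimeError("Llama4 features require the optional torch dependency.")
    # Assumed API: llama3's Tokenizer exposes get_instance(); llama4 is assumed to match.
    return Llama4Tokenizer.get_instance()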