fix: address reviewer feedback - improve conditional imports and remove provider alias logic

- Improve conditional import approach with better documentation
- Remove provider-specific alias logic from sku_list.py
- Conditional imports are necessary because llama4 requires torch
- Addresses @ashwinb and @raghotham feedback while maintaining compatibility

This commit is contained in:
skamenan7 2025-07-10 12:06:14 -04:00
parent 61dc2a9c58
commit e5377d078d
2 changed files with 5 additions and 15 deletions

View file

@@ -52,7 +52,7 @@ from llama_stack.models.llama.llama3.prompt_templates import (
)
from llama_stack.models.llama.llama3.tokenizer import Tokenizer
# Conditional imports to avoid heavy dependencies during module loading
# Import llama4 components - these require torch to be available
try:
from llama_models.llama4.chat_format import ChatFormat as Llama4ChatFormat
from llama_models.llama4.prompt_templates.system_prompts import (
@@ -62,8 +62,11 @@ try:
LLAMA4_AVAILABLE = True
except ImportError:
# Llama4 dependencies not available (e.g., torch not installed)
# Llama4 requires torch - if not available, we can't use Llama4 features
LLAMA4_AVAILABLE = False
Llama4ChatFormat = None
PythonListCustomToolGeneratorLlama4 = None
Llama4Tokenizer = None
from llama_stack.models.llama.sku_list import resolve_model
from llama_stack.models.llama.sku_types import ModelFamily, is_multimodal
from llama_stack.providers.utils.inference import supported_inference_models