From f5c36c47eda09affb72d8c3ef7e21fa608034a54 Mon Sep 17 00:00:00 2001 From: varunfb Date: Tue, 10 Dec 2024 20:03:31 -0800 Subject: [PATCH] Added support for llama 3.3 model (#601) # What does this PR do? Llama-Stack does not support the 3.3 model, so this patch adds support so that Llama-Stack can run inference with the 3.3 model. --- llama_stack/providers/utils/inference/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/llama_stack/providers/utils/inference/__init__.py b/llama_stack/providers/utils/inference/__init__.py index d204f98a4..553d02418 100644 --- a/llama_stack/providers/utils/inference/__init__.py +++ b/llama_stack/providers/utils/inference/__init__.py @@ -27,7 +27,8 @@ def supported_inference_models() -> List[Model]: m for m in all_registered_models() if ( - m.model_family in {ModelFamily.llama3_1, ModelFamily.llama3_2} + m.model_family + in {ModelFamily.llama3_1, ModelFamily.llama3_2, ModelFamily.llama3_3} or is_supported_safety_model(m) ) ]