llama-stack/llama_stack/providers/utils/inference/__init__.py
varunfb f5c36c47ed
Added support for llama 3.3 model (#601)
# What does this PR do?

Llama Stack did not support the Llama 3.3 model, so this adds support so that llama-stack can run inference with the 3.3 model.
2024-12-10 20:03:31 -08:00


# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import List

from llama_models.datatypes import *  # noqa: F403
from llama_models.sku_list import all_registered_models


def is_supported_safety_model(model: Model) -> bool:
    # Only bf16 checkpoints of the Llama Guard 3 family are accepted as safety models.
    if model.quantization_format != CheckpointQuantizationFormat.bf16:
        return False

    model_id = model.core_model_id
    return model_id in [
        CoreModelId.llama_guard_3_8b,
        CoreModelId.llama_guard_3_1b,
        CoreModelId.llama_guard_3_11b_vision,
    ]


def supported_inference_models() -> List[Model]:
    # Inference is supported for the Llama 3.1, 3.2, and 3.3 families,
    # plus the safety models accepted above.
    return [
        m
        for m in all_registered_models()
        if (
            m.model_family
            in {ModelFamily.llama3_1, ModelFamily.llama3_2, ModelFamily.llama3_3}
            or is_supported_safety_model(m)
        )
    ]


ALL_HUGGINGFACE_REPOS_TO_MODEL_DESCRIPTOR = {
    m.huggingface_repo: m.descriptor() for m in all_registered_models()
}
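
For context, here is a minimal sketch of how a provider might consume these helpers to check whether a requested model is supported for inference. The `provider_supports` function and the example HuggingFace repo string are illustrative assumptions, not part of this module.

# Illustrative sketch only: the helper name and example repo below are assumptions
# made for demonstration; they are not defined in llama-stack itself.
from llama_stack.providers.utils.inference import (
    ALL_HUGGINGFACE_REPOS_TO_MODEL_DESCRIPTOR,
    supported_inference_models,
)


def provider_supports(descriptor: str) -> bool:
    # Compare the requested descriptor against every supported inference model.
    return any(m.descriptor() == descriptor for m in supported_inference_models())


# Resolve a HuggingFace repo name to its registry descriptor, then check support.
repo = "meta-llama/Llama-3.3-70B-Instruct"  # example repo; assumed to be registered
descriptor = ALL_HUGGINGFACE_REPOS_TO_MODEL_DESCRIPTOR.get(repo)
if descriptor is not None and provider_supports(descriptor):
    print(f"{repo} maps to {descriptor} and is supported for inference")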