From 78b07ddc92a25deb80af25f83d56eb0620129b1c Mon Sep 17 00:00:00 2001
From: Byung Chun Kim
Date: Mon, 30 Sep 2024 02:29:19 +0000
Subject: [PATCH] pass core_model_id, not the model itself, to is_multimodal()

---
 llama_stack/providers/utils/inference/augment_messages.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_stack/providers/utils/inference/augment_messages.py b/llama_stack/providers/utils/inference/augment_messages.py
index 9f1f000e3..10375cf0e 100644
--- a/llama_stack/providers/utils/inference/augment_messages.py
+++ b/llama_stack/providers/utils/inference/augment_messages.py
@@ -34,7 +34,7 @@ def augment_messages_for_tools(request: ChatCompletionRequest) -> List[Message]:
         return request.messages
 
     if model.model_family == ModelFamily.llama3_1 or (
-        model.model_family == ModelFamily.llama3_2 and is_multimodal(model)
+        model.model_family == ModelFamily.llama3_2 and is_multimodal(model.core_model_id)
     ):
         # llama3.1 and llama3.2 multimodal models follow the same tool prompt format
         return augment_messages_for_tools_llama_3_1(request)