From 2f096ca5094ce90def443a0b5fc0190cb31b063a Mon Sep 17 00:00:00 2001 From: Byung Chun Kim Date: Mon, 30 Sep 2024 12:16:50 +0900 Subject: [PATCH] is_multimodal accepts core_model_id, not the model itself. (#153) --- llama_stack/providers/utils/inference/augment_messages.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama_stack/providers/utils/inference/augment_messages.py b/llama_stack/providers/utils/inference/augment_messages.py index 9f1f000e3..10375cf0e 100644 --- a/llama_stack/providers/utils/inference/augment_messages.py +++ b/llama_stack/providers/utils/inference/augment_messages.py @@ -34,7 +34,7 @@ def augment_messages_for_tools(request: ChatCompletionRequest) -> List[Message]: return request.messages if model.model_family == ModelFamily.llama3_1 or ( - model.model_family == ModelFamily.llama3_2 and is_multimodal(model) + model.model_family == ModelFamily.llama3_2 and is_multimodal(model.core_model_id) ): # llama3.1 and llama3.2 multimodal models follow the same tool prompt format return augment_messages_for_tools_llama_3_1(request)