From c219a74fa0a3a4eb3b33a48126711ff37f9af7ac Mon Sep 17 00:00:00 2001
From: Ihar Hrachyshka
Date: Tue, 6 May 2025 12:50:44 -0400
Subject: [PATCH] fix: Don't require efficiency_config for torchtune (#2104)

# What does this PR do?

Revert a change that mistakenly forced efficiency_config on torchtune
provider users.

```
fix: Don't require efficiency_config for torchtune

It was enforced by mistake when 0751a960a518785a821407bee4b855fbf56e88cb
merged.

The other asserts made sense in that the code was written to always
expect a non-None value. That is not the case for efficiency_config.
```

Signed-off-by: Ihar Hrachyshka
---
 .../torchtune/recipes/lora_finetuning_single_device.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py b/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
index 1239523cd..b5a495935 100644
--- a/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
+++ b/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
@@ -39,7 +39,6 @@ from llama_stack.apis.datasets import Datasets
 from llama_stack.apis.post_training import (
     Checkpoint,
     DataConfig,
-    EfficiencyConfig,
     LoraFinetuningConfig,
     OptimizerConfig,
     QATFinetuningConfig,
@@ -90,8 +89,6 @@ class LoraFinetuningSingleDevice:
     ) -> None:
         assert isinstance(training_config.data_config, DataConfig), "DataConfig must be initialized"
 
-        assert isinstance(training_config.efficiency_config, EfficiencyConfig), "EfficiencyConfig must be initialized"
-
         self.job_uuid = job_uuid
         self.training_config = training_config
         if not isinstance(algorithm_config, LoraFinetuningConfig):
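
For context, here is a minimal sketch of the contract this patch restores: the required data_config keeps its isinstance assert, while efficiency_config is optional and may be left as None. The dataclasses and the trimmed constructor signature below are simplified stand-ins for illustration, not the actual llama_stack.apis.post_training definitions.

```python
# Sketch only: DataConfig, EfficiencyConfig, and TrainingConfig are
# simplified stand-ins, and the real constructor takes more arguments.
from dataclasses import dataclass
from typing import Optional


@dataclass
class DataConfig:
    dataset_id: str


@dataclass
class EfficiencyConfig:
    enable_activation_checkpointing: bool = False  # hypothetical field


@dataclass
class TrainingConfig:
    data_config: Optional[DataConfig] = None
    efficiency_config: Optional[EfficiencyConfig] = None  # optional by design


class LoraFinetuningSingleDevice:
    def __init__(self, job_uuid: str, training_config: TrainingConfig) -> None:
        # Still required: the recipe cannot run without a dataset.
        assert isinstance(training_config.data_config, DataConfig), "DataConfig must be initialized"
        # No assert for efficiency_config: None is a valid value, so any
        # code that reads it must guard with an explicit None check.
        self.job_uuid = job_uuid
        self.training_config = training_config


# After the fix, a caller may omit efficiency_config entirely:
recipe = LoraFinetuningSingleDevice(
    job_uuid="job-1",
    training_config=TrainingConfig(data_config=DataConfig(dataset_id="my-dataset")),
)
```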