From 75c54547eb87bac690098eefe0f51f0c5809ed17 Mon Sep 17 00:00:00 2001
From: Ihar Hrachyshka
Date: Mon, 5 May 2025 15:53:05 -0400
Subject: [PATCH] fix: Don't require efficiency_config for torchtune

It was enforced by mistake when 0751a960a518785a821407bee4b855fbf56e88cb
merged. The other asserts arguably made sense because that code was written
to always expect a non-None value, but that is not the case for
efficiency_config.

Signed-off-by: Ihar Hrachyshka
---
 .../torchtune/recipes/lora_finetuning_single_device.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py b/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
index 1239523cd..b5a495935 100644
--- a/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
+++ b/llama_stack/providers/inline/post_training/torchtune/recipes/lora_finetuning_single_device.py
@@ -39,7 +39,6 @@ from llama_stack.apis.datasets import Datasets
 from llama_stack.apis.post_training import (
     Checkpoint,
     DataConfig,
-    EfficiencyConfig,
     LoraFinetuningConfig,
     OptimizerConfig,
     QATFinetuningConfig,
@@ -90,8 +89,6 @@ class LoraFinetuningSingleDevice:
     ) -> None:
         assert isinstance(training_config.data_config, DataConfig), "DataConfig must be initialized"
 
-        assert isinstance(training_config.efficiency_config, EfficiencyConfig), "EfficiencyConfig must be initialized"
-
         self.job_uuid = job_uuid
         self.training_config = training_config
         if not isinstance(algorithm_config, LoraFinetuningConfig):
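
For context, a minimal sketch (not part of the patch) of the pattern this change permits: efficiency_config is optional, so consumers branch on it instead of asserting it is non-None. The dataclasses below are simplified stand-ins for the real llama_stack.apis.post_training models, and the helper function is hypothetical, not code from the repository.

from dataclasses import dataclass
from typing import Optional

# Simplified stand-ins for the llama_stack.apis.post_training models.
@dataclass
class EfficiencyConfig:
    enable_activation_checkpointing: bool = False

@dataclass
class TrainingConfig:
    n_epochs: int = 1
    efficiency_config: Optional[EfficiencyConfig] = None  # may legitimately be None

def activation_checkpointing_enabled(training_config: TrainingConfig) -> bool:
    # Check the optional config only when it was actually provided, rather
    # than asserting it is set, which is what the removed assert wrongly required.
    if training_config.efficiency_config is not None:
        return training_config.efficiency_config.enable_activation_checkpointing
    return False

# A TrainingConfig without efficiency_config is valid after this fix.
assert activation_checkpointing_enabled(TrainingConfig()) is False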