From 81c7d6fa2ed2ca96b1799060abd24b0ae8a7015a Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Tue, 29 Jul 2025 14:20:09 -0700
Subject: [PATCH] chore(ci): disable post training tests (#2953)

Post training tests need _much_ better thinking before we can re-enable
them to run on every single PR. Running them periodically should be
approached only once it is shown that the tests are reliable and as
lightweight as possible; otherwise, it is just kicking the can down the
road.
---
 tests/integration/post_training/test_post_training.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/tests/integration/post_training/test_post_training.py b/tests/integration/post_training/test_post_training.py
index 0c30184ef..93ca4c32d 100644
--- a/tests/integration/post_training/test_post_training.py
+++ b/tests/integration/post_training/test_post_training.py
@@ -22,6 +22,15 @@ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(
 logger = logging.getLogger(__name__)
 
 
+skip_because_resource_intensive = pytest.mark.skip(
+    reason="""
+    Post training tests are extremely resource intensive. They download large models and, partly as a result,
+    are very slow to run. We cannot run them on every single PR update. CI should be considered
+    a scarce resource and properly utilized.
+    """
+)
+
+
 @pytest.fixture(autouse=True)
 def capture_output(capsys):
     """Fixture to capture and display output during test execution."""
@@ -57,6 +66,7 @@ class TestPostTraining:
         ],
     )
     @pytest.mark.timeout(360)  # 6 minutes timeout
+    @skip_because_resource_intensive
     def test_supervised_fine_tune(self, llama_stack_client, purpose, source):
         logger.info("Starting supervised fine-tuning test")
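
Note (not part of the patch): the marker above skips the tests unconditionally, so re-enabling them later means editing the file again. If the goal is to keep these tests runnable on demand (for example, from a scheduled CI job), one common pytest pattern is an environment-gated skipif instead of a hard skip. A minimal sketch, assuming a hypothetical RUN_RESOURCE_INTENSIVE_TESTS opt-in variable that this PR does not define:

    import os

    import pytest

    # Sketch of an opt-in variant of the marker added in this patch.
    # RUN_RESOURCE_INTENSIVE_TESTS is a hypothetical environment variable,
    # not something this PR or llama-stack defines.
    skip_because_resource_intensive = pytest.mark.skipif(
        not os.environ.get("RUN_RESOURCE_INTENSIVE_TESTS"),
        reason="Post training tests are extremely resource intensive; "
        "set RUN_RESOURCE_INTENSIVE_TESTS=1 to opt in.",
    )

    @skip_because_resource_intensive
    def test_supervised_fine_tune_smoke():
        # Hypothetical placeholder; the real tests live in
        # tests/integration/post_training/test_post_training.py.
        assert True

With that variant, running `RUN_RESOURCE_INTENSIVE_TESTS=1 pytest tests/integration/post_training/` would opt in locally or in a periodic job, while ordinary PR runs would still skip the tests.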