From 2537d0cb5d89444f926b3c4eb6d4c269d79915f4 Mon Sep 17 00:00:00 2001 From: Charles Li Date: Tue, 31 Mar 2026 01:52:09 +0000 Subject: [PATCH] Use synthetic dataset_type to decrease training time --- tests/integration/gradient_accumulation_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/gradient_accumulation_test.py b/tests/integration/gradient_accumulation_test.py index 468c7aced8..5b464147c5 100644 --- a/tests/integration/gradient_accumulation_test.py +++ b/tests/integration/gradient_accumulation_test.py @@ -62,6 +62,7 @@ def test_grad_accumulate_same_loss(self): get_test_config_path(), f"base_output_directory={self.base_output_directory}", f"dataset_path={self.dataset_path}", + "dataset_type=synthetic", "gradient_clipping_threshold=0", # Ensures we are testing raw scales of gradients (clipping off) "enable_checkpointing=False", "enable_goodput_recording=False",