@@ -91,7 +91,7 @@ def test_pretrain_then_lora_finetune(self, tmp_path):
             pretrain_checkpoint_dir,
             pretrain_iters,
             ckpt_format=pretrain_cfg.checkpoint.ckpt_format,
-            thread_count=pretrain_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=pretrain_cfg.checkpoint.storage_writers_per_rank,
         )

         # Create LoRA config and run finetuning
@@ -103,7 +103,7 @@ def test_pretrain_then_lora_finetune(self, tmp_path):
             lora_checkpoint_dir,
             lora_iters,
             ckpt_format=lora_cfg.checkpoint.ckpt_format,
-            thread_count=lora_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=lora_cfg.checkpoint.storage_writers_per_rank,
         )
         verify_peft_checkpoint_smaller(pretrain_checkpoint_dir, lora_checkpoint_dir, pretrain_iters, lora_iters)

@@ -143,7 +143,7 @@ def test_lora_save_and_resume(self, tmp_path):
             pretrain_checkpoint_dir,
             pretrain_iters,
             ckpt_format=pretrain_cfg.checkpoint.ckpt_format,
-            thread_count=pretrain_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=pretrain_cfg.checkpoint.storage_writers_per_rank,
         )

         # Second run: LoRA finetuning initial phase (will be "interrupted")
@@ -165,7 +165,7 @@ def test_lora_save_and_resume(self, tmp_path):
             lora_checkpoint_dir,
             initial_lora_iters,
             ckpt_format=lora_initial_cfg.checkpoint.ckpt_format,
-            thread_count=lora_initial_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=lora_initial_cfg.checkpoint.storage_writers_per_rank,
         )

         # Third run: Resume LoRA finetuning from checkpoint (adapter-only states)
@@ -189,7 +189,7 @@ def test_lora_save_and_resume(self, tmp_path):
             lora_checkpoint_dir,
             total_lora_iters,
             ckpt_format=lora_resume_cfg.checkpoint.ckpt_format,
-            thread_count=lora_resume_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=lora_resume_cfg.checkpoint.storage_writers_per_rank,
         )
         verify_peft_checkpoint_smaller(
             pretrain_checkpoint_dir, lora_checkpoint_dir, pretrain_iters, initial_lora_iters
@@ -227,7 +227,7 @@ def test_lora_finetune_with_packed_sequences(self, tmp_path):
             pretrain_checkpoint_dir,
             pretrain_iters,
             ckpt_format=pretrain_cfg.checkpoint.ckpt_format,
-            thread_count=pretrain_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=pretrain_cfg.checkpoint.storage_writers_per_rank,
         )

         # Create LoRA config with packed sequences and run finetuning
@@ -248,7 +248,7 @@ def test_lora_finetune_with_packed_sequences(self, tmp_path):
             lora_checkpoint_dir,
             lora_iters,
             ckpt_format=lora_cfg.checkpoint.ckpt_format,
-            thread_count=lora_cfg.checkpoint.thread_count,
+            storage_writers_per_rank=lora_cfg.checkpoint.storage_writers_per_rank,
        )
         verify_peft_checkpoint_smaller(pretrain_checkpoint_dir, lora_checkpoint_dir, pretrain_iters, lora_iters)

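For reference, a minimal sketch of what the definition side of this rename could look like. Only the keyword arguments ckpt_format and storage_writers_per_rank (formerly thread_count) appear in the diff above; the helper name verify_checkpoint_files, the iter_* directory layout, and the .distcp shard pattern are assumptions for illustration, not taken from the repository.

from pathlib import Path


def verify_checkpoint_files(  # hypothetical helper name; only its call sites appear above
    checkpoint_dir: Path,
    train_iters: int,
    ckpt_format: str,
    storage_writers_per_rank: int,  # renamed from thread_count in this change
) -> None:
    """Assert the final-iteration checkpoint exists and is sharded as configured."""
    # Assumed Megatron-style layout: one directory per saved iteration.
    iter_dir = Path(checkpoint_dir) / f"iter_{train_iters:07d}"
    assert iter_dir.is_dir(), f"missing checkpoint dir: {iter_dir}"
    if ckpt_format == "torch_dist":
        # With several storage writers per rank, the distributed checkpoint is
        # split into multiple shard files, so at least that many should exist.
        shards = list(iter_dir.glob("*.distcp"))
        assert len(shards) >= storage_writers_per_rank, (
            f"expected >= {storage_writers_per_rank} shards, found {len(shards)}"
        )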