Skip to content

Commit e9b66cb

Browse files
Adjust checkpointing frequency when time-based validation is active
1 parent 98f608f commit e9b66cb

File tree

1 file changed

+4
-0
lines changed

1 file changed

+4
-0
lines changed

src/lightning/pytorch/callbacks/model_checkpoint.py

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -467,6 +467,10 @@ def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool:
         if self._save_on_train_epoch_end is not None:
             return self._save_on_train_epoch_end

+        # time-based validation: always defer saving to validation end
+        if getattr(trainer, "_val_check_time_interval", None) is not None:
+            return False
+
         # if `check_val_every_n_epoch != 1`, we can't say when the validation dataloader will be loaded
         # so let's not enforce saving at every training epoch end
         if trainer.check_val_every_n_epoch != 1:

0 commit comments

Comments
 (0)