src/lightning/pytorch/loops/optimization: 1 file changed, +4 -2 lines changed

@@ -23,13 +23,14 @@
 from typing_extensions import override
 
 import lightning.pytorch as pl
+from lightning.fabric.utilities.warnings import PossibleUserWarning
 from lightning.pytorch.loops.loop import _Loop
 from lightning.pytorch.loops.optimization.closure import AbstractClosure, OutputResult
 from lightning.pytorch.loops.progress import _OptimizationProgress
 from lightning.pytorch.loops.utilities import _block_parallel_sync_behavior
 from lightning.pytorch.trainer import call
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.rank_zero import WarningCache
+from lightning.pytorch.utilities.rank_zero import WarningCache, rank_zero_warn
 from lightning.pytorch.utilities.types import STEP_OUTPUT
 
 
@@ -320,10 +321,11 @@ def _training_step(self, kwargs: OrderedDict) -> ClosureResult:
         self.trainer.strategy.post_training_step()  # unused hook - call anyway for backward compatibility
 
         if training_step_output is None and trainer.world_size > 1:
-            raise RuntimeError(
+            rank_zero_warn(
                 "Skipping the `training_step` by returning None in distributed training is not supported."
                 " It is recommended that you rewrite your training logic to avoid having to skip the step in the first"
                 " place."
+                category=PossibleUserWarning,
             )
 
         return self.output_result_cls.from_training_step_output(training_step_output, trainer.accumulate_grad_batches)
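
After this change, a `training_step` that returns `None` under a multi-process run (`trainer.world_size > 1`) emits a `PossibleUserWarning` via `rank_zero_warn` instead of raising a `RuntimeError`. The sketch below is only an illustration of the resulting call pattern, not part of the patch: the `warn_on_skipped_step` helper is hypothetical, and it adds the trailing comma that the `category=` keyword needs after the implicitly concatenated message string (the diff's context lines do not show one). It also shows how a user script could escalate the warning back into an error, or silence it, with the standard `warnings` filter.

import warnings

from lightning.fabric.utilities.warnings import PossibleUserWarning
from lightning.pytorch.utilities.rank_zero import rank_zero_warn


def warn_on_skipped_step() -> None:
    # Same message and category as the patched code path; warns once on rank zero
    # instead of raising. The comma before category= is required for valid syntax.
    rank_zero_warn(
        "Skipping the `training_step` by returning None in distributed training is not supported."
        " It is recommended that you rewrite your training logic to avoid having to skip the step in the first"
        " place.",
        category=PossibleUserWarning,
    )


# A script that still wants the old fail-fast behaviour can turn the warning back
# into an exception; "ignore" instead of "error" would silence it entirely.
warnings.filterwarnings("error", category=PossibleUserWarning)

try:
    warn_on_skipped_step()
except PossibleUserWarning as exc:
    print(f"Escalated to an error: {exc}")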