Commit 49e2fab

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent f50b3a9 commit 49e2fab

File tree

1 file changed: +3, -8 lines


src/lightning/pytorch/loops/training_epoch_loop.py

Lines changed: 3 additions & 8 deletions
@@ -277,7 +277,7 @@ def advance(self, data_fetcher: _DataFetcher) -> None:
         # =====================================================================
         # FINAL: Check for SIGTERM broadcast and exit synchronously across ranks
         from lightning.pytorch.utilities.exceptions import SIGTERMException
-
+
         # Rank 0 broadcasts SIGTERM status
         if (
             dist.is_available()
@@ -293,13 +293,9 @@ def advance(self, data_fetcher: _DataFetcher) -> None:
                 dist.broadcast(sigterm_tensor, src=0)
             except Exception:
                 pass  # Ignore broadcast error on non-DDP setups
-
+
         # All ranks listen for SIGTERM
-        if (
-            dist.is_available()
-            and dist.is_initialized()
-            and self.trainer.world_size > 1
-        ):
+        if dist.is_available() and dist.is_initialized() and self.trainer.world_size > 1:
             try:
                 sigterm_tensor = torch.tensor([0], device=self.trainer.strategy.root_device)
                 dist.broadcast(sigterm_tensor, src=0)
@@ -310,7 +306,6 @@ def advance(self, data_fetcher: _DataFetcher) -> None:
                 pass  # Fallback for CPU/CI environments
         # =====================================================================

-
         if using_dataloader_iter := isinstance(data_fetcher, _DataLoaderIterDataFetcher):
             dataloader_iter = next(data_fetcher)
             # hook's batch_idx and dataloader_idx arguments correctness cannot be guaranteed in this setting
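
The diff above is purely mechanical (pre-commit's hooks collapsing a multi-line condition and stripping trailing whitespace), but the pattern it touches is worth spelling out: rank 0 broadcasts a SIGTERM flag tensor so that every rank exits the training loop at the same step. Below is a minimal, self-contained sketch of that pattern, assuming an already-initialized torch.distributed process group; the helper name sync_sigterm and its signature are illustrative assumptions, not Lightning API.

import torch
import torch.distributed as dist


def sync_sigterm(got_sigterm: bool, device: torch.device) -> bool:
    # Hypothetical helper (not Lightning API): returns True on every rank
    # if rank 0 observed a SIGTERM, so all ranks can stop synchronously.
    if not (dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1):
        return got_sigterm  # single-process run: nothing to synchronize

    # Rank 0 publishes its flag; all other ranks start at 0 and receive rank 0's value.
    flag = torch.tensor([1 if (got_sigterm and dist.get_rank() == 0) else 0], device=device)
    try:
        dist.broadcast(flag, src=0)
    except Exception:
        pass  # mirror the diff: tolerate broadcast failures on non-DDP setups
    return bool(flag.item())

A caller inside the loop could then raise SIGTERMException (the import shown in the diff) when this returns True, so all ranks leave advance() on the same batch.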
