
Commit fe912ff

merge conflict fix
2 parents: e5d67d9 + 7642ff2

File tree

2 files changed: +7 -7 lines changed


docs/source-pytorch/common/trainer.rst

Lines changed: 2 additions & 2 deletions
@@ -774,7 +774,7 @@ Useful for quickly debugging or trying to overfit on purpose.
 
     # overfit on 10 consistent train batches & 10 consistent val batches
     trainer = Trainer(overfit_batches=10)
-
+
     # debug using a single consistent train batch and a single consistent val batch
 
 
@@ -962,7 +962,7 @@ Additionally, you can pass a strategy object.
 See Also:
     - :ref:`Multi GPU Training <multi_gpu>`.
     - :doc:`Model Parallel GPU training guide <../advanced/model_parallel>`.
-    - :doc:`TPU training guide <../accelerators/tpu>`.
+    - :doc:`TPU training guide <../accelerators/tpu>`.
 
 
 sync_batchnorm
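
For readers skimming the diff: the first hunk documents `overfit_batches`, which caps training (and validation) to a fixed number of batches so a model can be overfit on purpose. Below is a minimal, hypothetical sketch of how the flag is used end to end; the `TinyModel` class and the random dataset are illustrative placeholders, not part of this commit.

# Minimal sketch (assumes lightning and torch are installed); the model and
# dataset are stand-ins for a real project.
import torch
from torch.utils.data import DataLoader, TensorDataset
import lightning.pytorch as pl


class TinyModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


dataset = TensorDataset(torch.randn(640, 32), torch.randn(640, 1))
train_loader = DataLoader(dataset, batch_size=64, shuffle=True)

# Overfit on 10 consistent train batches; Lightning turns off shuffling
# (see _resolve_overfit_batches below) so the same batches repeat each epoch.
trainer = pl.Trainer(overfit_batches=10, max_epochs=5)
trainer.fit(TinyModel(), train_loader)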

src/lightning/pytorch/trainer/connectors/data_connector.py

Lines changed: 5 additions & 5 deletions
@@ -245,10 +245,10 @@ def _get_distributed_sampler(
 
 def _resolve_overfit_batches(combined_loader: CombinedLoader, mode: RunningStage) -> None:
     """Resolve overfit batches by disabling shuffling.
-
-    When overfit_batches > 0, this function ensures that sequential sampling is used
-    without shuffling for consistent batches across epochs. Training and validation
-    use different sets of data.
+
+    When overfit_batches > 0, this function ensures that sequential sampling is used without shuffling for consistent
+    batches across epochs. Training and validation use different sets of data.
+
     """
     all_have_sequential_sampler = all(
         isinstance(dl.sampler, SequentialSampler) for dl in combined_loader.flattened if hasattr(dl, "sampler")
@@ -260,7 +260,7 @@ def _resolve_overfit_batches(combined_loader: CombinedLoader, mode: RunningStage
         f"You requested to overfit but enabled {mode.dataloader_prefix} dataloader shuffling."
         f" We are turning off the {mode.dataloader_prefix} dataloader shuffling for you."
     )
-
+
     updated = [
         _update_dataloader(dl, sampler=SequentialSampler(dl.dataset), mode=mode) if hasattr(dl, "dataset") else dl
         for dl in combined_loader.flattened
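
The second file is where the behavior described in the docs actually lives: `_resolve_overfit_batches` warns when any flattened dataloader shuffles and swaps in a `SequentialSampler` so every epoch sees the same batches. The snippet below is a standalone sketch of that idea only; it deliberately skips Lightning's `CombinedLoader` and `_update_dataloader` plumbing, and `force_sequential` is a hypothetical helper name, not part of this commit.

# Standalone sketch of the idea behind _resolve_overfit_batches: rebuild a
# DataLoader with a SequentialSampler so every epoch yields the same batches.
import warnings

from torch.utils.data import DataLoader, SequentialSampler


def force_sequential(dataloader: DataLoader, prefix: str = "train") -> DataLoader:
    if isinstance(dataloader.sampler, SequentialSampler):
        return dataloader  # already deterministic, nothing to do
    warnings.warn(
        f"You requested to overfit but enabled {prefix} dataloader shuffling."
        f" We are turning off the {prefix} dataloader shuffling for you."
    )
    # Rebuild the loader with a sequential sampler, keeping the other settings.
    return DataLoader(
        dataloader.dataset,
        batch_size=dataloader.batch_size,
        sampler=SequentialSampler(dataloader.dataset),
        num_workers=dataloader.num_workers,
        collate_fn=dataloader.collate_fn,
    )

Usage would simply be train_loader = force_sequential(train_loader); the real helper instead walks every dataloader in combined_loader.flattened and writes the updated list back.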
