
Commit d9a620a

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent: fed9783

2 files changed: +9, -2 lines

tests/tests_pytorch/helpers/advanced_models.py (6 additions, 1 deletion)

@@ -218,4 +218,9 @@ def configure_optimizers(self):
         return torch.optim.Adam(self.parameters(), lr=0.02)

     def train_dataloader(self):
-        return DataLoader(MNIST(root=_PATH_DATASETS, train=True, download=True), batch_size=128, num_workers=1, persistent_workers=True)
+        return DataLoader(
+            MNIST(root=_PATH_DATASETS, train=True, download=True),
+            batch_size=128,
+            num_workers=1,
+            persistent_workers=True,
+        )
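The change above is purely cosmetic: the pre-commit hook re-wraps an over-long `DataLoader` call, and `persistent_workers=True` was already present on the deleted line. For context, a minimal self-contained sketch of what that flag does in plain PyTorch; the toy `TensorDataset` standing in for MNIST is an assumption for illustration, not part of the commit:

import torch
from torch.utils.data import DataLoader, TensorDataset

# Toy stand-in for the MNIST dataset used by the test model (an assumption
# for illustration; the real code downloads MNIST into _PATH_DATASETS).
dataset = TensorDataset(torch.randn(256, 1, 28, 28), torch.randint(0, 10, (256,)))

# persistent_workers=True keeps the worker process(es) alive across epochs
# instead of shutting them down and respawning them each time the loader
# is re-iterated, saving worker startup cost per epoch.
loader = DataLoader(dataset, batch_size=128, num_workers=1, persistent_workers=True)

if __name__ == "__main__":  # guard needed when workers use the spawn start method
    for epoch in range(2):
        for images, labels in loader:  # the second epoch reuses the same worker
            pass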

tests/tests_pytorch/trainer/connectors/test_data_connector.py (3 additions, 1 deletion)

@@ -252,7 +252,9 @@ def test_update_dataloader_with_multiprocessing_context():
     """This test verifies that `use_distributed_sampler` conserves multiprocessing context."""
     train = RandomDataset(32, 64)
     context = "spawn"
-    train = DataLoader(train, batch_size=32, num_workers=2, multiprocessing_context=context, shuffle=True, persistent_workers=True)
+    train = DataLoader(
+        train, batch_size=32, num_workers=2, multiprocessing_context=context, shuffle=True, persistent_workers=True
+    )
     new_data_loader = _update_dataloader(train, SequentialSampler(train.dataset))
     assert new_data_loader.multiprocessing_context == train.multiprocessing_context
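The helper under test, `_update_dataloader`, rebuilds a `DataLoader` around a new sampler while carrying over its settings. Below is a rough standalone sketch of the invariant being asserted, written in plain PyTorch rather than Lightning's actual implementation; the `TensorDataset` stand-in for `RandomDataset(32, 64)` is an assumption. The equality check holds because passing the string "spawn" makes the loader store the context object returned by `multiprocessing.get_context`, which is a per-method singleton:

import torch
from torch.utils.data import DataLoader, SequentialSampler, TensorDataset

# Stand-in for RandomDataset(32, 64) from the test (an assumption).
dataset = TensorDataset(torch.randn(64, 32))

train = DataLoader(
    dataset, batch_size=32, num_workers=2, multiprocessing_context="spawn", shuffle=True, persistent_workers=True
)

# Rebuild the loader with a SequentialSampler, forwarding the context by
# hand; roughly the invariant that _update_dataloader must preserve.
new_data_loader = DataLoader(
    train.dataset,
    batch_size=train.batch_size,
    num_workers=train.num_workers,
    multiprocessing_context=train.multiprocessing_context,
    sampler=SequentialSampler(train.dataset),
    persistent_workers=True,
)

assert new_data_loader.multiprocessing_context == train.multiprocessing_context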
