Skip to content

Commit 56d05a3

Browse files
Alan Chu
authored and committed
remove print statement
1 parent 0e570a8 commit 56d05a3

File tree

1 file changed

+7
-9
lines changed

1 file changed

+7
-9
lines changed

tests/tests_pytorch/callbacks/test_finetuning_callback.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,11 @@
1919
from lightning.pytorch import LightningModule, Trainer, seed_everything
2020
from lightning.pytorch.callbacks import BackboneFinetuning, BaseFinetuning, ModelCheckpoint
2121
from lightning.pytorch.demos.boring_classes import BoringModel, RandomDataset
22+
from tests_pytorch.helpers.runif import RunIf
2223
from torch import nn
2324
from torch.optim import SGD, Optimizer
2425
from torch.utils.data import DataLoader
2526

26-
from tests_pytorch.helpers.runif import RunIf
27-
2827

2928
class TestBackboneFinetuningCallback(BackboneFinetuning):
3029
def on_train_epoch_start(self, trainer, pl_module):
@@ -283,10 +282,12 @@ def test_complex_nested_model():
283282
directly themselves rather than exclusively their submodules containing parameters."""
284283

285284
model = nn.Sequential(
286-
OrderedDict([
287-
("encoder", nn.Sequential(ConvBlockParam(3, 64), ConvBlock(64, 128))),
288-
("decoder", ConvBlock(128, 10)),
289-
])
285+
OrderedDict(
286+
[
287+
("encoder", nn.Sequential(ConvBlockParam(3, 64), ConvBlock(64, 128))),
288+
("decoder", ConvBlock(128, 10)),
289+
]
290+
)
290291
)
291292

292293
# There are 10 leaf modules or parent modules w/ parameters in the test model
@@ -346,8 +347,6 @@ def test_callbacks_restore(tmp_path):
346347
assert len(callback._internal_optimizer_metadata) == 1
347348

348349
# only 2 param groups
349-
print("##########")
350-
print(callback._internal_optimizer_metadata[0])
351350
assert len(callback._internal_optimizer_metadata[0]) == 2
352351

353352
# original parameters
@@ -470,7 +469,6 @@ def training_step(self, batch, batch_idx):
470469
def configure_optimizers(self):
471470
return torch.optim.SGD(self.parameters(), lr=0.1)
472471

473-
print("start of the test")
474472
model = TestModel()
475473
callback = TrackingFinetuningCallback()
476474
trainer = Trainer(

0 commit comments

Comments
 (0)