
Commit ef35dca

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent a205c4a commit ef35dca

2 files changed (+11, -11 lines)

src/lightning/pytorch/trainer/trainer.py

Lines changed: 3 additions & 2 deletions
@@ -28,8 +28,10 @@
 from typing import Any, Dict, Generator, Iterable, List, Optional, Union
 from weakref import proxy
 
-import lightning.pytorch as pl
 import torch
+from torch.optim import Optimizer
+
+import lightning.pytorch as pl
 from lightning.fabric.utilities.apply_func import convert_tensors_to_scalars
 from lightning.fabric.utilities.cloud_io import _is_local_file_protocol
 from lightning.fabric.utilities.types import _PATH
@@ -77,7 +79,6 @@
     LRSchedulerConfig,
 )
 from lightning.pytorch.utilities.warnings import PossibleUserWarning
-from torch.optim import Optimizer
 
 log = logging.getLogger(__name__)
 
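The net change in trainer.py is pure import reordering: import lightning.pytorch as pl moves below the third-party torch imports, and from torch.optim import Optimizer moves up out of the first-party block. This is the usual stdlib, then third-party, then first-party grouping applied by import sorters; which hook produced it (isort, Ruff, etc.) is not shown on this page. A minimal sketch of the convention:

    # standard library
    import logging

    # third-party
    import torch
    from torch.optim import Optimizer

    # first-party, separated from the other groups by a blank line
    import lightning.pytorch as pl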
tests/tests_pytorch/callbacks/test_finetuning_callback.py

Lines changed: 8 additions & 9 deletions
@@ -19,11 +19,12 @@
 from lightning.pytorch import LightningModule, Trainer, seed_everything
 from lightning.pytorch.callbacks import BackboneFinetuning, BaseFinetuning, ModelCheckpoint
 from lightning.pytorch.demos.boring_classes import BoringModel, RandomDataset
-from tests_pytorch.helpers.runif import RunIf
 from torch import nn
 from torch.optim import SGD, Optimizer
 from torch.utils.data import DataLoader
 
+from tests_pytorch.helpers.runif import RunIf
+
 
 class TestBackboneFinetuningCallback(BackboneFinetuning):
     def on_train_epoch_start(self, trainer, pl_module):
@@ -282,12 +283,10 @@ def test_complex_nested_model():
     directly themselves rather than exclusively their submodules containing parameters."""
 
     model = nn.Sequential(
-        OrderedDict(
-            [
-                ("encoder", nn.Sequential(ConvBlockParam(3, 64), ConvBlock(64, 128))),
-                ("decoder", ConvBlock(128, 10)),
-            ]
-        )
+        OrderedDict([
+            ("encoder", nn.Sequential(ConvBlockParam(3, 64), ConvBlock(64, 128))),
+            ("decoder", ConvBlock(128, 10)),
+        ])
     )
 
     # There are 10 leaf modules or parent modules w/ parameters in the test model
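This hunk is formatting only: the nn.Sequential(OrderedDict(...)) construction is collapsed into a bracket-hugging layout, with no behavioral change. A minimal sketch (using nn.Linear stand-ins rather than the test's ConvBlock helpers, which are not shown here) demonstrating that both spellings build the same named submodules:

    from collections import OrderedDict

    from torch import nn

    # Expanded layout (before) and hugged layout (after) are equivalent.
    expanded = nn.Sequential(
        OrderedDict(
            [
                ("encoder", nn.Linear(3, 64)),
                ("decoder", nn.Linear(64, 10)),
            ]
        )
    )
    compact = nn.Sequential(OrderedDict([
        ("encoder", nn.Linear(3, 64)),
        ("decoder", nn.Linear(64, 10)),
    ]))
    assert [n for n, _ in expanded.named_children()] == [n for n, _ in compact.named_children()]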
@@ -437,8 +436,8 @@ def test_unsupported_strategies(tmp_path):
 
 
 def test_finetuning_with_configure_model(tmp_path):
-    """Test that BaseFinetuning works correctly with configure_model by ensuring freeze_before_training
-    is called after configure_model but before training starts."""
+    """Test that BaseFinetuning works correctly with configure_model by ensuring freeze_before_training is called after
+    configure_model but before training starts."""
 
     class TrackingFinetuningCallback(BaseFinetuning):
         def __init__(self):
