
Commit e26132a

pre-commit-ci[bot] authored and mieshkiwrk committed
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 1bc2ce7 commit e26132a

1 file changed: +4 −4 lines changed

src/lightning/pytorch/trainer/trainer.py

Lines changed: 4 additions & 4 deletions
@@ -25,7 +25,7 @@
 import os
 from contextlib import contextmanager
 from datetime import timedelta
-from typing import Any, Dict, Generator, Iterable, List, Optional, Union, Callable
+from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Union
 from weakref import proxy
 
 import torch
@@ -127,7 +127,7 @@ def __init__(
         sync_batchnorm: bool = False,
         reload_dataloaders_every_n_epochs: int = 0,
         default_root_dir: Optional[_PATH] = None,
-        compile_fn: Optional[Callable] = None
+        compile_fn: Optional[Callable] = None,
     ) -> None:
         r"""Customize every aspect of training via flags.
@@ -470,7 +470,7 @@ def __init__(
         self.state = TrainerState()
 
         self.compile_fn = compile_fn
-
+
         # configure profiler
         setup._init_profiler(self, profiler)
 
@@ -962,7 +962,7 @@ def _run(
         # compile if compile_fn provided after configured strategy
         if self.compile_fn is not None:
             self.strategy.model = self.compile_fn(self.strategy.model)
-
+
         # hook
         if self.state.fn == TrainerFn.FITTING:
            call._call_callback_hooks(self, "on_fit_start")
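
In plain terms, the branch this commit touches threads a user-supplied compile_fn callable through Trainer.__init__, stores it on the trainer, and applies it to self.strategy.model in _run once the strategy has been configured, just before the on_fit_start hook fires. A minimal usage sketch, assuming this in-progress API (compile_fn is not part of a released Lightning version, and the model and datamodule names below are hypothetical placeholders):

import torch
import lightning.pytorch as pl

# Sketch only: `compile_fn` is the Trainer argument added in this branch and may
# change before (or never reach) a release. Any callable that maps the
# strategy-wrapped module to a (compiled) module should work, e.g. torch.compile.
trainer = pl.Trainer(
    max_epochs=1,
    compile_fn=lambda module: torch.compile(module, mode="reduce-overhead"),
)

# MyLightningModule and my_datamodule stand in for the user's own code.
# trainer.fit(MyLightningModule(), datamodule=my_datamodule)

Because the callable runs after the strategy is set up ("compile if compile_fn provided after configured strategy"), it compiles the strategy-wrapped module rather than the raw LightningModule, which is why the diff places the call in _run instead of in __init__.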
