
Commit 0cb0c7a

Borda authored and alexierule committed
formatting
1 parent 97adde9 commit 0cb0c7a

15 files changed (+28 lines, −24 lines)

pytorch_lightning/callbacks/pruning.py

Lines changed: 2 additions & 3 deletions
@@ -422,9 +422,8 @@ def sanitize_parameters_to_prune(
         current_modules = [m for m in pl_module.modules() if not isinstance(m, _MODULE_CONTAINERS)]

         if parameters_to_prune is None:
-            parameters_to_prune = [
-                (m, p) for p in parameters for m in current_modules if getattr(m, p, None) is not None
-            ]
+            parameters_to_prune = [(m, p) for p in parameters for m in current_modules
+                                   if getattr(m, p, None) is not None]
         elif (
             isinstance(parameters_to_prune, (list, tuple)) and len(parameters_to_prune) > 0
             and all(len(p) == 2 for p in parameters_to_prune)
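
For context, the comprehension being reflowed pairs each candidate module with each requested parameter name and skips modules that do not own that parameter. A minimal standalone sketch of the same pattern, feeding the resulting pairs into PyTorch's built-in global pruning (the toy model, the container filter, and the 20% amount are illustrative, not taken from this commit):

import torch.nn as nn
import torch.nn.utils.prune as prune

model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
parameters = ("weight",)

# Same shape of comprehension as above: keep only (module, parameter_name)
# pairs where the module actually has that parameter (ReLU has no "weight").
current_modules = [m for m in model.modules() if not isinstance(m, nn.Sequential)]
parameters_to_prune = [(m, p) for p in parameters for m in current_modules
                       if getattr(m, p, None) is not None]

# Hand the pairs to torch's global unstructured pruning.
prune.global_unstructured(parameters_to_prune, pruning_method=prune.L1Unstructured, amount=0.2)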

pytorch_lightning/plugins/training_type/ddp.py

Lines changed: 1 addition & 3 deletions
@@ -207,9 +207,7 @@ def pre_configure_ddp(self):
         # Many models require setting this parameter to True, as there are corner cases
         # when not all parameter backward hooks are fired by the autograd engine even if require_grad is set to True.
         # This flag does come with a performance hit, so it is suggested to disable in cases where it is possible.
-        self._ddp_kwargs["find_unused_parameters"] = self._ddp_kwargs.get(
-            "find_unused_parameters", True
-        )
+        self._ddp_kwargs["find_unused_parameters"] = self._ddp_kwargs.get("find_unused_parameters", True)
         # todo: PyTorch 1.7.0 DDP introduces ``self.reducer._rebuild_buckets()`` breaking manual_optimization
         if _TORCH_GREATER_EQUAL_1_7 and not self.lightning_module.automatic_optimization and not self._ddp_kwargs.get(
             "find_unused_parameters", False

pytorch_lightning/plugins/training_type/ddp_spawn.py

Lines changed: 1 addition & 3 deletions
@@ -176,9 +176,7 @@ def pre_configure_ddp(self):
         # Many models require setting this parameter to True, as there are corner cases
         # when not all parameter backward hooks are fired by the autograd engine even if require_grad is set to True.
         # This flag does come with a performance hit, so it is suggested to disable in cases where it is possible.
-        self._ddp_kwargs["find_unused_parameters"] = self._ddp_kwargs.get(
-            "find_unused_parameters", True
-        )
+        self._ddp_kwargs["find_unused_parameters"] = self._ddp_kwargs.get("find_unused_parameters", True)
         # todo: PyTorch 1.7.0 DDP introduces ``self.reducer._rebuild_buckets()`` breaking manual_optimization
         if _TORCH_GREATER_EQUAL_1_7 and not self.lightning_module.automatic_optimization and not self._ddp_kwargs.get(
             "find_unused_parameters", False

pytorch_lightning/trainer/callback_hook.py

Lines changed: 1 addition & 2 deletions
@@ -214,8 +214,7 @@ def on_save_checkpoint(self, checkpoint: Dict[str, Any]) -> Dict[Type, dict]:
                 rank_zero_warn(
                     "`Callback.on_save_checkpoint` signature has changed in v1.3."
                     " A `checkpoint` parameter has been added."
-                    " Support for the old signature will be removed in v1.5",
-                    DeprecationWarning
+                    " Support for the old signature will be removed in v1.5", DeprecationWarning
                 )
                 state = callback.on_save_checkpoint(self, self.lightning_module)  # noqa: parameter-unfilled
             else:
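
The deprecation message being reflowed targets callbacks that still implement `on_save_checkpoint` without the new `checkpoint` argument. A callback written against the v1.3 signature looks roughly like the sketch below (the returned state is just a placeholder):

from pytorch_lightning.callbacks import Callback

class MyCallback(Callback):

    # New-style hook: receives the checkpoint dict and returns this callback's state.
    def on_save_checkpoint(self, trainer, pl_module, checkpoint):
        return {"placeholder_counter": 0}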

pytorch_lightning/trainer/connectors/env_vars_connector.py

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ def _defaults_from_env_vars(fn: Callable) -> Callable:
     Decorator for :class:`~pytorch_lightning.trainer.trainer.Trainer` methods for which
     input arguments should be moved automatically to the correct device.
     """
+
     @wraps(fn)
     def insert_env_defaults(self, *args, **kwargs):
         cls = self.__class__  # get the class
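
The decorator itself only gains a blank line here; its job is to fill in arguments the caller did not pass from environment variables. A heavily simplified, standalone illustration of that idea follows (this is not Lightning's implementation, and the MYAPP_* variable names and argument list are made up):

import os
from functools import wraps

def defaults_from_env_vars(fn):
    """Fill missing keyword arguments from MYAPP_<NAME> environment variables."""

    @wraps(fn)
    def insert_env_defaults(self, *args, **kwargs):
        for name in ("max_epochs", "num_workers"):
            env_value = os.environ.get(f"MYAPP_{name.upper()}")
            # Only use the environment value when the caller did not pass one.
            if name not in kwargs and env_value is not None:
                kwargs[name] = int(env_value)
        return fn(self, *args, **kwargs)

    return insert_env_defaults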

pytorch_lightning/utilities/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 """General utilities"""

 import numpy
+
 from pytorch_lightning.utilities.apply_func import move_data_to_device  # noqa: F401
 from pytorch_lightning.utilities.distributed import (  # noqa: F401
     AllGatherGrad,

pytorch_lightning/utilities/cloud_io.py

Lines changed: 2 additions & 3 deletions
@@ -12,17 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import os
 import io
+import os
 from pathlib import Path
 from typing import IO, Union

 import fsspec
+import torch
 from fsspec.implementations.local import LocalFileSystem
 from packaging.version import Version

-import torch
-

 class _LightningLocalFileSystem(LocalFileSystem):
     """Extension of ``fsspec.implementations.local.LocalFileSystem`` where ``LightningLocalFileSystem.isdir`` behaves

tests/accelerators/test_accelerator_connector.py

Lines changed: 1 addition & 1 deletion
@@ -33,8 +33,8 @@
     PrecisionPlugin,
     SingleDevicePlugin,
 )
-from pytorch_lightning.utilities import _DEEPSPEED_AVAILABLE
 from pytorch_lightning.plugins.environments import LightningEnvironment, SLURMEnvironment, TorchElasticEnvironment
+from pytorch_lightning.utilities import _DEEPSPEED_AVAILABLE
 from tests.helpers.boring_model import BoringModel
 from tests.helpers.runif import RunIf

tests/accelerators/test_cpu.py

Lines changed: 1 addition & 2 deletions
@@ -14,8 +14,7 @@ def test_unsupported_precision_plugins():
     trainer = Mock()
     model = Mock()
     accelerator = CPUAccelerator(
-        training_type_plugin=SingleDevicePlugin(torch.device("cpu")),
-        precision_plugin=MixedPrecisionPlugin()
+        training_type_plugin=SingleDevicePlugin(torch.device("cpu")), precision_plugin=MixedPrecisionPlugin()
     )
     with pytest.raises(MisconfigurationException, match=r"amp \+ cpu is not supported."):
         accelerator.setup(trainer=trainer, model=model)

tests/callbacks/test_swa.py

Lines changed: 2 additions & 1 deletion
@@ -27,9 +27,10 @@
 from tests.helpers.runif import RunIf

 if _TORCH_GREATER_EQUAL_1_6:
-    from pytorch_lightning.callbacks import StochasticWeightAveraging
     from torch.optim.swa_utils import SWALR

+    from pytorch_lightning.callbacks import StochasticWeightAveraging
+
 class SwaTestModel(BoringModel):

     def __init__(self, batchnorm: bool = True, interval: str = "epoch"):
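
For reference, the StochasticWeightAveraging callback whose import is being reordered here is enabled simply by passing it to the Trainer; the epoch fraction and learning rate below are illustrative values, not taken from these tests:

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import StochasticWeightAveraging

# Start averaging weights after 80% of training and anneal toward the SWA learning rate.
trainer = Trainer(
    max_epochs=10,
    callbacks=[StochasticWeightAveraging(swa_epoch_start=0.8, swa_lrs=1e-2)],
)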
