Skip to content

Commit a56c12c

Browse files
authored
Switch v1.10 deprecation references (#16321)
1 parent e63d813 commit a56c12c

File tree

31 files changed

+518
-543
lines changed

31 files changed

+518
-543
lines changed

src/pytorch_lightning/accelerators/accelerator.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,11 +31,11 @@ class Accelerator(_Accelerator, ABC):
3131
def setup_environment(self, root_device: torch.device) -> None:
3232
"""
3333
.. deprecated:: v1.8.0
34-
This hook was deprecated in v1.8.0 and will be removed in v1.10.0. Please use ``setup_device()`` instead.
34+
This hook was deprecated in v1.8.0 and will be removed in v2.0.0. Please use ``setup_device()`` instead.
3535
"""
3636
rank_zero_deprecation(
3737
"`Accelerator.setup_environment` has been deprecated in v1.8.0 and will be removed in"
38-
" v1.10.0. Please use ``setup_device()`` instead."
38+
" v2.0.0. Please use `setup_device()` instead."
3939
)
4040
self.setup_device(root_device)
4141

src/pytorch_lightning/cli.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -279,7 +279,7 @@ def __init__(
279279
args: ArgsType = None,
280280
run: bool = True,
281281
auto_configure_optimizers: bool = True,
282-
**kwargs: Any, # Remove with deprecations of v1.10
282+
**kwargs: Any, # Remove with deprecations of v2.0.0
283283
) -> None:
284284
"""Receives as input pytorch-lightning classes (or callables which return pytorch-lightning classes), which
285285
are called / instantiated using a parsed configuration file and / or command line args.
@@ -364,7 +364,7 @@ def _handle_deprecated_params(self, kwargs: dict) -> None:
364364
self.save_config_kwargs[key] = value
365365
rank_zero_deprecation(
366366
f"LightningCLI's {name!r} init parameter is deprecated from v1.8 and will "
367-
f"be removed in v1.10. Use `save_config_kwargs={{'{key}': ...}}` instead."
367+
f"be removed in v2.0.0. Use `save_config_kwargs={{'{key}': ...}}` instead."
368368
)
369369

370370
for name in kwargs.keys() & ["description", "env_prefix", "env_parse"]:

src/pytorch_lightning/core/mixins/device_dtype_mixin.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,6 @@ class DeviceDtypeModuleMixin(_DeviceDtypeModuleMixin):
2020
def __init__(self) -> None:
2121
rank_zero_deprecation(
2222
"`pytorch_lightning.core.mixins.DeviceDtypeModuleMixin` has been deprecated in v1.8.0 and will be"
23-
" removed in v1.10.0. This class is internal but you can copy over its implementation."
23+
" removed in v2.0.0. This class is internal but you can copy over its implementation."
2424
)
2525
super().__init__()

src/pytorch_lightning/lite/lite.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -85,13 +85,13 @@ class LightningLite(Fabric, ABC):
8585
gpus: Provides the same function as the ``devices`` argument but implies ``accelerator="gpu"``.
8686
8787
.. deprecated:: v1.8.0
88-
``gpus`` has been deprecated in v1.8.0 and will be removed in v1.10.0.
88+
``gpus`` has been deprecated in v1.8.0 and will be removed in v2.0.0.
8989
Please use ``accelerator='gpu'`` and ``devices=x`` instead.
9090
9191
tpu_cores: Provides the same function as the ``devices`` argument but implies ``accelerator="tpu"``.
9292
9393
.. deprecated:: v1.8.0
94-
``tpu_cores`` has been deprecated in v1.8.0 and will be removed in v1.10.0.
94+
``tpu_cores`` has been deprecated in v1.8.0 and will be removed in v2.0.0.
9595
Please use ``accelerator='tpu'`` and ``devices=x`` instead.
9696
"""
9797

@@ -154,12 +154,12 @@ def _convert_deprecated_device_flags(
154154
if gpus is not None:
155155
rank_zero_deprecation(
156156
f"Setting `Lite(gpus={gpus!r})` is deprecated in v1.8.0 and will be removed"
157-
f" in v1.10.0. Please use `Lite(accelerator='gpu', devices={gpus!r})` instead."
157+
f" in v2.0.0. Please use `Lite(accelerator='gpu', devices={gpus!r})` instead."
158158
)
159159
if tpu_cores is not None:
160160
rank_zero_deprecation(
161161
f"Setting `Lite(tpu_cores={tpu_cores!r})` is deprecated in v1.8.0 and will be removed"
162-
f" in v1.10.0. Please use `Lite(accelerator='tpu', devices={tpu_cores!r})` instead."
162+
f" in v2.0.0. Please use `Lite(accelerator='tpu', devices={tpu_cores!r})` instead."
163163
)
164164
deprecated_devices_specific_flag = gpus or tpu_cores
165165
if deprecated_devices_specific_flag and deprecated_devices_specific_flag not in ([], 0, "0"):

src/pytorch_lightning/loops/optimization/optimizer_loop.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -361,7 +361,7 @@ def _optimizer_step(
361361
if is_param_in_hook_signature(pl_module.optimizer_step, "using_native_amp", explicit=True):
362362
rank_zero_deprecation(
363363
"The NVIDIA/apex AMP implementation has been deprecated upstream. Consequently, its integration inside"
364-
" PyTorch Lightning has been deprecated in v1.9.0 and will be removed in v1.10.0."
364+
" PyTorch Lightning has been deprecated in v1.9.0 and will be removed in v2.0.0."
365365
f" The `{type(pl_module).__name__}.optimizer_step()` hook is overridden, including the"
366366
" `using_native_amp` argument. Removing this argument will avoid this message, you can expect it to"
367367
" return True."

src/pytorch_lightning/overrides/base.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def __init__(
7575
"`forward_module` must be a `LightningModule` instance or have an attribute `.module` pointing to one,"
7676
f" got: {forward_module.__class__.__qualname__}"
7777
)
78-
# TODO: In v1.10.0, remove the Optional type from forward_module and remove the assertion
78+
# TODO: In v2.0.0, remove the Optional type from forward_module and remove the assertion
7979
assert forward_module is not None
8080
self._forward_module = forward_module
8181

@@ -118,11 +118,11 @@ def _validate_init_arguments(
118118
pl_module: Optional[Union["pl.LightningModule", _LightningPrecisionModuleWrapperBase]] = None,
119119
forward_module: Optional[Union["pl.LightningModule", _LightningPrecisionModuleWrapperBase]] = None,
120120
) -> None:
121-
# TODO: In v1.10.0, remove this method and mark the forward_module init argument in all subclasses as required
121+
# TODO: In v2.0.0, remove this method and mark the forward_module init argument in all subclasses as required
122122
if pl_module is not None:
123123
rank_zero_deprecation(
124124
f"The argument `pl_module` in `{cls.__name__}` is deprecated in v1.8.0 and will be removed in"
125-
" v1.10.0. Please use `forward_module` instead."
125+
" v2.0.0. Please use `forward_module` instead."
126126
)
127127
elif forward_module is None:
128128
raise ValueError("Argument `forward_module` is required.")
@@ -133,7 +133,7 @@ def unwrap_lightning_module(wrapped_model: nn.Module, _suppress_warning: bool =
133133
attributes on the wrapper.
134134
135135
.. deprecated:: v1.8.0
136-
The function ``unwrap_lightning_module`` is deprecated in v1.8.0 and will be removed in v1.10.0. Access the
136+
The function ``unwrap_lightning_module`` is deprecated in v1.8.0 and will be removed in v2.0.0. Access the
137137
``LightningModule`` directly through the strategy attribute ``Strategy.lightning_module``.
138138
139139
Raises:
@@ -142,7 +142,7 @@ def unwrap_lightning_module(wrapped_model: nn.Module, _suppress_warning: bool =
142142
"""
143143
if not _suppress_warning:
144144
rank_zero_deprecation(
145-
"The function `unwrap_lightning_module` is deprecated in v1.8.0 and will be removed in v1.10.0. Access the"
145+
"The function `unwrap_lightning_module` is deprecated in v1.8.0 and will be removed in v2.0.0. Access the"
146146
" `LightningModule` directly through the strategy attribute `Strategy.lightning_module`."
147147
)
148148
model = wrapped_model

src/pytorch_lightning/overrides/data_parallel.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ class LightningParallelModule(_LightningModuleWrapperBase):
5555
pl_module: The module to wrap. See description for `forward_module`.
5656
5757
.. deprecated:: v1.8.0
58-
The argument ``pl_module`` is deprecated in v1.8.0 and will be removed in v1.10.0. Please use
58+
The argument ``pl_module`` is deprecated in v1.8.0 and will be removed in v2.0.0. Please use
5959
``forward_module`` instead.
6060
6161
forward_module: The module to wrap. If it's not a ``LightningModule``, it must have an attribute ``.module``

src/pytorch_lightning/overrides/fairscale.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def unwrap_lightning_module_sharded(wrapped_model: nn.Module) -> "pl.LightningMo
3838
from fairscale.nn.data_parallel.sharded_ddp import ShardedDataParallel
3939

4040
rank_zero_deprecation(
41-
"The function `unwrap_lightning_module_sharded` is deprecated in v1.8.0 and will be removed in v1.10.0."
41+
"The function `unwrap_lightning_module_sharded` is deprecated in v1.8.0 and will be removed in v2.0.0."
4242
" Access the `LightningModule` directly through the strategy attribute `Strategy.lightning_module`."
4343
)
4444
model = wrapped_model

src/pytorch_lightning/plugins/precision/apex_amp.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def _import_amp_without_deprecation() -> ModuleType:
3636
return amp
3737

3838

39-
# TODO: remove in v1.10.0
39+
# TODO: remove in v2.0.0
4040
class ApexMixedPrecisionPlugin(PrecisionPlugin):
4141
"""Mixed Precision Plugin based on Nvidia/Apex (https://github.com/NVIDIA/apex)"""
4242

@@ -47,7 +47,7 @@ def __init__(self, amp_level: str = "O2") -> None:
4747
rank_zero_deprecation(
4848
"The NVIDIA/apex AMP implementation has been deprecated upstream. Consequently, its integration inside"
4949
f" PyTorch Lightning has been deprecated in v1.9.0. The `{type(self).__name__}` class will be removed in"
50-
" v1.10.0. Please use PyTorch's AMP implementation available in"
50+
" v2.0.0. Please use PyTorch's AMP implementation available in"
5151
" `pytorch_lightning.plugins.MixedPrecisionPlugin` instead."
5252
)
5353
if not _APEX_AVAILABLE:

src/pytorch_lightning/plugins/precision/deepspeed.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -55,11 +55,11 @@ def __init__(
5555
amp_level: Optional[str] = None,
5656
) -> None:
5757
if amp_type == "apex":
58-
# TODO: remove in v1.10.0
58+
# TODO: remove in v2.0.0
5959
rank_zero_deprecation(
6060
"The NVIDIA/apex AMP implementation has been deprecated upstream. Consequently, its integration inside"
6161
" PyTorch Lightning has been deprecated in v1.9.0. Support for using it through the DeepSpeed"
62-
" implementation will be removed in v1.10.0."
62+
" implementation will be removed in v2.0.0."
6363
)
6464
if not _APEX_AVAILABLE:
6565
raise MisconfigurationException(
@@ -77,7 +77,7 @@ def __init__(
7777
else:
7878
rank_zero_deprecation(
7979
f"Passing `{type(self).__name__}(amp_type={amp_type!r})` has been deprecated in v1.9.0 and will be removed"
80-
f" in v1.10.0. This argument is no longer necessary."
80+
f" in v2.0.0. This argument is no longer necessary."
8181
)
8282

8383
supported_precision = get_args(_PRECISION_INPUT_STR) + get_args(_PRECISION_INPUT_INT)

0 commit comments

Comments
 (0)