Commit 55271b1

awaelchli authored and lexierule committed

Actually show deprecation warnings and their line level [2/2] (#8002)

Co-authored-by: Adrian Wälchli <[email protected]>

1 parent 662b6d6 commit 55271b1
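
A note on what "line level" means here: Python's warnings.warn attributes a warning to a stack frame chosen by stacklevel. A deprecation helper that forwards to warnings.warn without raising the level reports its own file and line, so the warning appears to originate inside the framework rather than at the deprecated call site. A minimal sketch of the idea, with hypothetical helper and API names (not the verbatim Lightning implementation):

import warnings


def rank_zero_deprecation_sketch(message: str, stacklevel: int = 2) -> None:
    # stacklevel=2 attributes the warning to the caller of this helper, so the
    # "<file>:<line>" printed with the warning points at user code, not here.
    warnings.warn(message, category=DeprecationWarning, stacklevel=stacklevel)


def grad_norm(module, norm_type):  # hypothetical deprecated API surface
    # One extra wrapper frame sits between user code and warnings.warn, so the
    # level is bumped by one to keep the report on the user's line.
    rank_zero_deprecation_sketch(
        "`grad_norm` is deprecated in v1.3 and will be removed in v1.5",
        stacklevel=3,
    )
    return {}


grad_norm(None, 2.0)  # the DeprecationWarning is reported at this line

Each wrapper between the call site and warnings.warn adds one frame, which is why nested helpers need progressively higher levels.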

19 files changed: +154 -74 lines changed

.azure-pipelines/ipu-tests.yml

Lines changed: 1 addition & 4 deletions

@@ -53,12 +53,9 @@ jobs:
       export GIT_TERMINAL_PROMPT=1
       python -c "fname = 'requirements/extra.txt' ; lines = [line for line in open(fname).readlines() if 'fairscale' not in line] ; open(fname, 'w').writelines(lines)"
       python -c "fname = 'requirements/extra.txt' ; lines = [line for line in open(fname).readlines() if 'horovod' not in line] ; open(fname, 'w').writelines(lines)"
-
       python ./requirements/adjust_versions.py requirements/extra.txt
       python ./requirements/adjust_versions.py requirements/examples.txt
-
-      pip install --requirement ./requirements/devel.txt --upgrade-strategy only-if-needed
-
+      pip install . --requirement requirements/devel.txt
       pip list
     displayName: 'Install dependencies'

pytorch_lightning/core/datamodule.py

Lines changed: 1 addition & 0 deletions

@@ -20,6 +20,7 @@
 from torch.utils.data import DataLoader, Dataset, IterableDataset

 from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks
+from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.argparse import add_argparse_args, from_argparse_args, get_init_arguments_and_types

pytorch_lightning/core/grads.py

Lines changed: 1 addition & 1 deletion

@@ -18,7 +18,7 @@

 from torch.nn import Module

-from pytorch_lightning.utilities.distributed import rank_zero_deprecation
+from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.grads import grad_norm as new_grad_norm
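
The same one-line move recurs in most files below: the warning helpers are imported from the pytorch_lightning.utilities package root rather than from utilities.distributed. A sketch of the re-export that makes the root path work; that the helpers now live in a utilities.warnings submodule is an assumption inferred from the deepspeed.py hunk further down:

# Sketch of pytorch_lightning/utilities/__init__.py, not the verbatim file.
# Re-exporting keeps `from pytorch_lightning.utilities import rank_zero_warn`
# stable even if the defining submodule changes again.
from pytorch_lightning.utilities.warnings import (  # noqa: F401
    rank_zero_deprecation,
    rank_zero_warn,
)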

pytorch_lightning/loggers/csv_logs.py

Lines changed: 2 additions & 1 deletion

@@ -29,7 +29,8 @@

 from pytorch_lightning.core.saving import save_hparams_to_yaml
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_only

 log = logging.getLogger(__name__)

pytorch_lightning/loggers/test_tube.py

Lines changed: 2 additions & 2 deletions

@@ -20,8 +20,8 @@

 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import _module_available
-from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
+from pytorch_lightning.utilities import _module_available, rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_only

 _TESTTUBE_AVAILABLE = _module_available("test_tube")

pytorch_lightning/plugins/training_type/ddp_spawn.py

Lines changed: 6 additions & 2 deletions

@@ -28,10 +28,14 @@
 from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
 from pytorch_lightning.trainer.states import TrainerFn
-from pytorch_lightning.utilities import _TORCH_GREATER_EQUAL_1_7, _TORCH_GREATER_EQUAL_1_8
+from pytorch_lightning.utilities import (
+    _TORCH_GREATER_EQUAL_1_7,
+    _TORCH_GREATER_EQUAL_1_8,
+    rank_zero_warn,
+)
 from pytorch_lightning.utilities.cloud_io import atomic_save
 from pytorch_lightning.utilities.cloud_io import load as pl_load
-from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn, ReduceOp, sync_ddp_if_available
+from pytorch_lightning.utilities.distributed import rank_zero_only, ReduceOp, sync_ddp_if_available
 from pytorch_lightning.utilities.seed import reset_seed

 if _TORCH_GREATER_EQUAL_1_8:

pytorch_lightning/plugins/training_type/deepspeed.py

Lines changed: 1 addition & 0 deletions

@@ -33,6 +33,7 @@
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _DEEPSPEED_AVAILABLE
+from pytorch_lightning.utilities.warnings import _warn, LightningDeprecationWarning

 if _DEEPSPEED_AVAILABLE:
     import deepspeed
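
_warn and LightningDeprecationWarning are named in this hunk; how they fit together below is an illustrative guess, not the library's code. A dedicated warning subclass lets users filter the framework's deprecations independently of every other DeprecationWarning:

import warnings


class LightningDeprecationWarning(DeprecationWarning):
    """Sketch of a framework-specific deprecation category."""


def _warn(message: str, stacklevel: int = 2, **kwargs) -> None:
    # Thin passthrough so call sites control the category and stack level.
    warnings.warn(message, stacklevel=stacklevel, **kwargs)


# Surface only Lightning's deprecations, however the global filters are set:
warnings.simplefilter("always", LightningDeprecationWarning)
_warn("`old_argument` is deprecated", category=LightningDeprecationWarning)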

pytorch_lightning/profiler/pytorch.py

Lines changed: 2 additions & 2 deletions

@@ -24,7 +24,7 @@
 from torch.autograd.profiler import record_function

 from pytorch_lightning.profiler import BaseProfiler
-from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE

@@ -351,7 +351,7 @@ def __deprecation_check(
         if profiled_functions is not None:
             rank_zero_deprecation(
                 "`PyTorchProfiler.profiled_functions` has been renamed to"
-                " `record_functions` in v1.3 and will be removed in v1.5",
+                " `record_functions` in v1.3 and will be removed in v1.5"
             )
         if not record_functions:
             record_functions |= set(profiled_functions)
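
The trailing comma removed in the second hunk is purely cosmetic: the two adjacent string literals are joined by Python's implicit concatenation either way, and a single positional argument reaches the helper. A quick check:

message = (
    "`PyTorchProfiler.profiled_functions` has been renamed to"
    " `record_functions` in v1.3 and will be removed in v1.5"
)
assert message.count("`record_functions`") == 1  # one concatenated string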

pytorch_lightning/trainer/connectors/accelerator_connector.py

Lines changed: 3 additions & 1 deletion

@@ -60,8 +60,10 @@
     device_parser,
     DeviceType,
     DistributedType,
+    rank_zero_deprecation,
+    rank_zero_info,
+    rank_zero_warn,
 )
-from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_info, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException

 if _HOROVOD_AVAILABLE:

pytorch_lightning/trainer/connectors/data_connector.py

Lines changed: 21 additions & 23 deletions

@@ -113,30 +113,28 @@ def attach_dataloaders(
     def attach_datamodule(
         self, model: 'pl.LightningModule', datamodule: Optional['pl.LightningDataModule'] = None
     ) -> None:
-        # We use datamodule if it's been provided, otherwise we check model for it
-        datamodule = datamodule or getattr(model, 'datamodule', None)
-
         # If we have a datamodule, attach necessary hooks + dataloaders
-        if datamodule:
-
-            # Override loader hooks
-            dl_methods = ('train_dataloader', 'val_dataloader', 'test_dataloader', 'predict_dataloader')
-            for method in dl_methods:
-                if is_overridden(method, datamodule):
-                    setattr(model, method, getattr(datamodule, method))
-
-            # Override data transfer hooks if dataset-specific to_device logic has been defined in datamodule
-            batch_transfer_hooks = ('on_before_batch_transfer', 'transfer_batch_to_device', 'on_after_batch_transfer')
-            for hook in batch_transfer_hooks:
-                if is_overridden(hook, datamodule):
-                    setattr(model, hook, getattr(datamodule, hook))
-
-            self.trainer.datamodule = datamodule
-            datamodule.trainer = self.trainer
-
-            # experimental feature for Flash
-            if hasattr(datamodule, "data_pipeline"):
-                model.data_pipeline = datamodule.data_pipeline
+        if datamodule is None:
+            return
+
+        # Override loader hooks
+        dl_methods = ('train_dataloader', 'val_dataloader', 'test_dataloader', 'predict_dataloader')
+        for method in dl_methods:
+            if is_overridden(method, datamodule):
+                setattr(model, method, getattr(datamodule, method))
+
+        # Override data transfer hooks if dataset-specific to_device logic has been defined in datamodule
+        batch_transfer_hooks = ('on_before_batch_transfer', 'transfer_batch_to_device', 'on_after_batch_transfer')
+        for hook in batch_transfer_hooks:
+            if is_overridden(hook, datamodule):
+                setattr(model, hook, getattr(datamodule, hook))
+
+        self.trainer.datamodule = datamodule
+        datamodule.trainer = self.trainer
+
+        # experimental feature for Flash
+        if hasattr(datamodule, "data_pipeline"):
+            model.data_pipeline = datamodule.data_pipeline


 class _PatchDataLoader(object):
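
A toy rendition of the attachment logic above, to show what the setattr loop does (the classes and the simplified override check are hypothetical; Lightning's is_overridden compares a method against the base-class implementation rather than merely checking existence):

class ToyDataModule:
    def train_dataloader(self):
        return [1, 2, 3]  # stands in for a real DataLoader


class ToyModel:
    def train_dataloader(self):
        raise NotImplementedError


dm = ToyDataModule()
model = ToyModel()

# Copy each dataloader hook the datamodule defines onto the model, as the
# diff does; the model's own placeholder is shadowed by the bound method.
for method in ('train_dataloader',):
    hook = getattr(dm, method, None)
    if callable(hook):
        setattr(model, method, hook)

print(model.train_dataloader())  # [1, 2, 3]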
