
Commit 0327f6b

carmocca and rohitgr7 authored
Do not warn when the name key is used in the lr_scheduler dict (#5057)
* Do not warn when the name key is used
* Missing line
* Consistency
* Update pytorch_lightning/callbacks/lr_monitor.py
* Update docs
* Update pytorch_lightning/core/lightning.py
* Update CHANGELOG

Co-authored-by: Rohit Gupta <[email protected]>
1 parent 16feb51 commit 0327f6b

6 files changed: +67, -17 lines changed


CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -45,6 +45,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed `LightningOptimizer` exposes optimizer attributes ([#5095](https://github.com/PyTorchLightning/pytorch-lightning/pull/5095))


+- Do not warn when the `name` key is used in the `lr_scheduler` dict ([#5057](https://github.com/PyTorchLightning/pytorch-lightning/pull/5057))
+

 ## [1.1.0] - 2020-12-09

pytorch_lightning/callbacks/lr_monitor.py

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ def _find_names(self, lr_schedulers) -> List[str]:
         names = []
         for scheduler in lr_schedulers:
             sch = scheduler['scheduler']
-            if 'name' in scheduler:
+            if scheduler['name'] is not None:
                 name = scheduler['name']
             else:
                 opt_name = 'lr-' + sch.optimizer.__class__.__name__
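Note (not part of the diff): because the default scheduler configuration now always contains a `name` key (see pytorch_lightning/trainer/optimizers.py below), the membership test `'name' in scheduler` is true for every scheduler, so `_find_names` has to check the value instead. A minimal sketch of the distinction, using placeholder objects:

    # After the defaults are merged in, the key is always present;
    # only its value tells us whether the user supplied a custom name.
    default_like = {'scheduler': object(), 'name': None}
    assert 'name' in default_like              # always true now
    assert default_like['name'] is None        # no custom name given

    named = {'scheduler': object(), 'name': 'my_logging_name'}
    assert named['name'] is not None           # this is what _find_names now checks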

pytorch_lightning/core/lightning.py

Lines changed: 7 additions & 6 deletions
@@ -990,7 +990,7 @@ def configure_optimizers(
             - List or Tuple - List of optimizers.
             - Two lists - The first list has multiple optimizers, the second a list of LR schedulers (or lr_dict).
             - Dictionary, with an 'optimizer' key, and (optionally) a 'lr_scheduler'
-              key which value is a single LR scheduler or lr_dict.
+              key whose value is a single LR scheduler or lr_dict.
             - Tuple of dictionaries as described, with an optional 'frequency' key.
             - None - Fit will run without any optimizer.
@@ -1002,21 +1002,22 @@ def configure_optimizers(
         In the former case, all optimizers will operate on the given batch in each optimization step.
         In the latter, only one optimizer will operate on the given batch at every step.

-        The lr_dict is a dictionary which contains scheduler and its associated configuration.
-        It has five keys. The default configuration is shown below.
+        The lr_dict is a dictionary which contains the scheduler and its associated configuration.
+        The default configuration is shown below.

         .. code-block:: python

             {
-                'scheduler': lr_scheduler, # The LR schduler
+                'scheduler': lr_scheduler, # The LR scheduler instance (required)
                 'interval': 'epoch', # The unit of the scheduler's step size
                 'frequency': 1, # The frequency of the scheduler
                 'reduce_on_plateau': False, # For ReduceLROnPlateau scheduler
                 'monitor': 'val_loss', # Metric for ReduceLROnPlateau to monitor
-                'strict': True # Whether to crash the training if `monitor` is not found
+                'strict': True, # Whether to crash the training if `monitor` is not found
+                'name': None, # Custom name for LearningRateMonitor to use
             }

-        If user only provides LR schedulers, then their configuration will set to default as shown above.
+        Only the ``scheduler`` key is required, the rest will be set to the defaults above.

         Examples:
             .. code-block:: python
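For context, a minimal sketch of how the documented `name` key is used from `configure_optimizers` (the model, optimizer, scheduler, and name below are illustrative, not taken from this diff):

    import torch
    from pytorch_lightning import LightningModule

    class MyModel(LightningModule):  # hypothetical model
        def configure_optimizers(self):
            optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
            scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)
            lr_dict = {
                'scheduler': scheduler,      # required
                'interval': 'epoch',         # step the scheduler once per epoch
                'frequency': 1,
                'name': 'my_logging_name',   # picked up by LearningRateMonitor instead of 'lr-Adam'
            }
            return {'optimizer': optimizer, 'lr_scheduler': lr_dict}

With a `LearningRateMonitor` callback attached to the `Trainer`, the learning rate is then logged under `my_logging_name` rather than the auto-generated `lr-Adam`.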

pytorch_lightning/trainer/optimizers.py

Lines changed: 1 addition & 0 deletions
@@ -94,6 +94,7 @@ def configure_schedulers(self, schedulers: list, monitor: Optional[str] = None):
         lr_schedulers = []
         default_config = {
             'scheduler': None,
+            'name': None,  # no custom name
             'interval': 'epoch',  # after epoch is over
             'frequency': 1,  # every epoch/batch
             'reduce_on_plateau': False,  # most often not ReduceLROnPlateau scheduler
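The diff only adds the default entry; the code that merges a user-provided dict over these defaults is not shown here. A minimal sketch of the intended semantics, assuming plain dict-style overriding (placeholder objects, not the Trainer's actual code):

    scheduler = object()  # stands in for a real torch.optim.lr_scheduler instance
    default_config = {
        'scheduler': None,
        'name': None,             # no custom name
        'interval': 'epoch',
        'frequency': 1,
        'reduce_on_plateau': False,
    }
    user_config = {'scheduler': scheduler, 'name': 'my_logging_name'}
    merged = {**default_config, **user_config}
    assert merged['name'] == 'my_logging_name' and merged['interval'] == 'epoch'

Because `name` now has a default, a user-supplied `name` counts as a recognized key and no longer produces the warning this commit removes.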

tests/callbacks/test_lr_monitor.py

Lines changed: 27 additions & 5 deletions
@@ -13,11 +13,11 @@
 # limitations under the License.
 import pytest

+import tests.base.develop_utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import LearningRateMonitor
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from tests.base import EvalModelTemplate
-import tests.base.develop_utils as tutils
+from tests.base import BoringModel, EvalModelTemplate


 def test_lr_monitor_single_lr(tmpdir):
@@ -43,7 +43,7 @@ def test_lr_monitor_single_lr(tmpdir):
         'Momentum should not be logged by default'
     assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \
         'Number of learning rates logged does not match number of lr schedulers'
-    assert all([k in ['lr-Adam'] for k in lr_monitor.lrs.keys()]), \
+    assert lr_monitor.lr_sch_names == list(lr_monitor.lrs.keys()) == ['lr-Adam'], \
         'Names of learning rates not set correctly'


@@ -134,7 +134,7 @@ def test_lr_monitor_multi_lrs(tmpdir, logging_interval):
     assert lr_monitor.lrs, 'No learning rates logged'
     assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \
         'Number of learning rates logged does not match number of lr schedulers'
-    assert all([k in ['lr-Adam', 'lr-Adam-1'] for k in lr_monitor.lrs.keys()]), \
+    assert lr_monitor.lr_sch_names == ['lr-Adam', 'lr-Adam-1'], \
         'Names of learning rates not set correctly'

     if logging_interval == 'step':
@@ -167,5 +167,27 @@ def test_lr_monitor_param_groups(tmpdir):
     assert lr_monitor.lrs, 'No learning rates logged'
     assert len(lr_monitor.lrs) == 2 * len(trainer.lr_schedulers), \
         'Number of learning rates logged does not match number of param groups'
-    assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_monitor.lrs.keys()]), \
+    assert lr_monitor.lr_sch_names == ['lr-Adam']
+    assert list(lr_monitor.lrs.keys()) == ['lr-Adam/pg1', 'lr-Adam/pg2'], \
         'Names of learning rates not set correctly'
+
+
+def test_lr_monitor_custom_name(tmpdir):
+    class TestModel(BoringModel):
+        def configure_optimizers(self):
+            optimizer, [scheduler] = super().configure_optimizers()
+            lr_scheduler = {'scheduler': scheduler, 'name': 'my_logging_name'}
+            return optimizer, [lr_scheduler]
+
+    lr_monitor = LearningRateMonitor()
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        max_epochs=2,
+        limit_val_batches=0.1,
+        limit_train_batches=0.5,
+        callbacks=[lr_monitor],
+        progress_bar_refresh_rate=0,
+        weights_summary=None,
+    )
+    trainer.fit(TestModel())
+    assert lr_monitor.lr_sch_names == list(lr_monitor.lrs.keys()) == ['my_logging_name']

tests/trainer/test_optimizers.py

Lines changed: 29 additions & 5 deletions
@@ -15,7 +15,6 @@
 import torch

 from pytorch_lightning import Callback, Trainer
-from pytorch_lightning.core.optimizer import LightningOptimizer
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.base import EvalModelTemplate
 from tests.base.boring_model import BoringModel
@@ -177,6 +176,7 @@ def test_reducelronplateau_scheduling(tmpdir):
         frequency=1,
         reduce_on_plateau=True,
         strict=True,
+        name=None,
     ), 'lr scheduler was not correctly converted to dict'


@@ -215,7 +215,13 @@ def test_optimizer_return_options(enable_pl_optimizer):
     assert len(freq) == 0
     assert optim[0] == opt_a
     assert lr_sched[0] == dict(
-        scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True
+        scheduler=scheduler_a,
+        interval='epoch',
+        frequency=1,
+        reduce_on_plateau=False,
+        monitor=None,
+        strict=True,
+        name=None,
     )

     # opt tuple of 1 list
@@ -225,7 +231,13 @@ def test_optimizer_return_options(enable_pl_optimizer):
     assert len(freq) == 0
     assert optim[0] == opt_a
     assert lr_sched[0] == dict(
-        scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True
+        scheduler=scheduler_a,
+        interval='epoch',
+        frequency=1,
+        reduce_on_plateau=False,
+        monitor=None,
+        strict=True,
+        name=None,
     )

     # opt single dictionary
@@ -235,7 +247,13 @@ def test_optimizer_return_options(enable_pl_optimizer):
     assert len(freq) == 0
     assert optim[0] == opt_a
     assert lr_sched[0] == dict(
-        scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True
+        scheduler=scheduler_a,
+        interval='epoch',
+        frequency=1,
+        reduce_on_plateau=False,
+        monitor=None,
+        strict=True,
+        name=None,
     )

     # opt multiple dictionaries with frequencies
@@ -247,7 +265,13 @@ def test_optimizer_return_options(enable_pl_optimizer):
     assert len(optim) == len(lr_sched) == len(freq) == 2
     assert optim[0] == opt_a
     assert lr_sched[0] == dict(
-        scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True
+        scheduler=scheduler_a,
+        interval='epoch',
+        frequency=1,
+        reduce_on_plateau=False,
+        monitor=None,
+        strict=True,
+        name=None,
     )
     assert freq == [1, 5]