-
Notifications
You must be signed in to change notification settings - Fork 7
Expand file tree
/
Copy pathconfig_scheduler.py
More file actions
50 lines (40 loc) · 1.7 KB
/
config_scheduler.py
File metadata and controls
50 lines (40 loc) · 1.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import torch
from hydra_zen import builds, store, MISSING, make_config
from typing import Union
def get_scheduler(optimizer,
                  scheduler_name: Union[str, None] = None,
                  **kwargs):
    """
    This function is internal to this file - it is exposed via global state by
    means of the Hydra store below.

    Returns the requested LR scheduler, constructed with kwargs.

    Args:
        optimizer: the torch optimizer the scheduler will drive.
        scheduler_name: case-insensitive name of the scheduler, or None /
            empty string for no scheduler (constant LR).
        **kwargs: forwarded verbatim to the scheduler constructor
            (e.g. ``gamma`` for ExponentialLR).

    Returns:
        A torch LR scheduler instance, or None when no name is given.

    Raises:
        NotImplementedError: if scheduler_name is not a supported scheduler.
    """
    if not scheduler_name:
        # No scheduler requested: caller keeps a constant learning rate.
        return None
    # Normalise once instead of re-lowercasing in every branch.
    name = str(scheduler_name).lower()
    if name == 'exponential':
        return torch.optim.lr_scheduler.ExponentialLR(optimizer, **kwargs)
    if name == 'reducelronplateau':
        return torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, **kwargs)
    # scheduler_name is guaranteed truthy at this point, so the original
    # trailing `elif scheduler_name:` guard was redundant; raise unconditionally.
    raise NotImplementedError('only Exponential and ReduceLROnPlateau are currently supported schedulers. Leave lr_scheduler=None for constant LR')
## Scheduler configs
def register_configs():
    """
    Registers configurations for LR schedulers with the Hydra store.
    """
    # Partial build: the optimizer argument is supplied later, at
    # instantiation time, so only scheduler_name is configured here.
    scheduler_builder = builds(get_scheduler,
                               scheduler_name=MISSING,
                               zen_partial=True)
    # (store entry name, scheduler_name passed to get_scheduler)
    entries = (
        ('none', None),
        ('reducelronplateau', 'reducelronplateau'),
        ('exponential', 'exponential'),
    )
    for entry_name, sched_name in entries:
        store(scheduler_builder(scheduler_name=sched_name),
              name=entry_name,
              group='scheduler_builder')
    store.add_to_hydra_store()