forked from mosaicml/examples
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbuilders.py
More file actions
88 lines (74 loc) · 3.33 KB
/
builders.py
File metadata and controls
88 lines (74 loc) · 3.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# Copyright 2022 MosaicML Examples authors
# SPDX-License-Identifier: Apache-2.0
import composer
from composer import algorithms
from composer.callbacks import LRMonitor, MemoryMonitor, SpeedMonitor
from composer.loggers import WandBLogger
from composer.optim import DecoupledAdamW
from composer.optim.scheduler import (ConstantWithWarmupScheduler,
CosineAnnealingWithWarmupScheduler,
LinearWithWarmupScheduler)
from packaging import version
from mosaicml_examples.speed_monitor_w_mfu import SpeedMonitorMFU
from mosaicml_examples.text_data import build_text_dataloader
def build_callback(name, kwargs):
    """Construct a Composer callback from its registry name.

    Args:
        name: One of 'lr_monitor', 'memory_monitor', 'speed_monitor',
            'optimizer_monitor'.
        kwargs: Dict of optional keyword arguments for the callback
            (e.g. 'window_size', 'gpu_flops_available',
            'log_optimizer_metrics').

    Returns:
        The instantiated callback.

    Raises:
        ValueError: If ``name`` is unknown, or 'optimizer_monitor' was
            requested but the installed composer version does not provide
            ``OptimizerMonitor``.
    """
    if name == 'lr_monitor':
        return LRMonitor()
    elif name == 'memory_monitor':
        return MemoryMonitor()
    elif name == 'speed_monitor':
        # SpeedMonitorMFU (adds MFU reporting) needs composer >= 0.12.0;
        # fall back to the plain SpeedMonitor on older releases.
        if version.parse(composer.__version__) < version.parse('0.12.0'):
            return SpeedMonitor(window_size=kwargs.get('window_size', 1))
        return SpeedMonitorMFU(window_size=kwargs.get('window_size', 1),
                               gpu_flops_available=kwargs.get(
                                   'gpu_flops_available', None))
    elif name == 'optimizer_monitor':
        # OptimizerMonitor only exists in newer composer versions. Catch
        # ImportError specifically (the original bare `except:` also
        # swallowed constructor errors) and chain the cause so the real
        # failure is visible to the caller.
        try:
            from composer.callbacks import OptimizerMonitor
        except ImportError as e:
            raise ValueError(f'Not sure how to build callback: {name}') from e
        return OptimizerMonitor(
            log_optimizer_metrics=kwargs.get('log_optimizer_metrics', True))
    else:
        raise ValueError(f'Not sure how to build callback: {name}')
def build_logger(name, kwargs):
    """Instantiate a logger destination from its registry name.

    Only 'wandb' is supported; ``kwargs`` is forwarded to ``WandBLogger``.
    Raises ValueError for any other name.
    """
    if name != 'wandb':
        raise ValueError(f'Not sure how to build logger: {name}')
    return WandBLogger(**kwargs)
def build_algorithm(name, kwargs):
    """Construct the Composer algorithm registered under ``name``.

    ``kwargs`` is forwarded unchanged to the algorithm constructor.
    Raises ValueError if ``name`` is not a known algorithm.
    """
    if name == 'gradient_clipping':
        return algorithms.GradientClipping(**kwargs)
    if name == 'alibi':
        return algorithms.Alibi(**kwargs)
    if name == 'fused_layernorm':
        return algorithms.FusedLayerNorm(**kwargs)
    if name == 'gated_linear_units':
        return algorithms.GatedLinearUnits(**kwargs)
    raise ValueError(f'Not sure how to build algorithm: {name}')
def build_optimizer(cfg, model):
    """Build the optimizer described by ``cfg`` over ``model``'s parameters.

    Expects ``cfg`` to carry ``name``, ``lr``, ``betas``, ``eps`` and
    ``weight_decay``. Only 'decoupled_adamw' is supported; any other
    ``cfg.name`` raises ValueError.
    """
    if cfg.name != 'decoupled_adamw':
        raise ValueError(f'Not sure how to build optimizer: {cfg.name}')
    return DecoupledAdamW(
        model.parameters(),
        lr=cfg.lr,
        betas=cfg.betas,
        eps=cfg.eps,
        weight_decay=cfg.weight_decay,
    )
def build_scheduler(cfg):
    """Create an LR scheduler from its config.

    ``cfg.name`` selects the scheduler; ``cfg.t_warmup`` (and ``cfg.alpha_f``
    for the decaying variants) parameterize it. Raises ValueError for an
    unrecognized name.
    """
    sched_name = cfg.name
    if sched_name == 'constant_with_warmup':
        return ConstantWithWarmupScheduler(t_warmup=cfg.t_warmup)
    if sched_name == 'cosine_with_warmup':
        return CosineAnnealingWithWarmupScheduler(t_warmup=cfg.t_warmup,
                                                  alpha_f=cfg.alpha_f)
    if sched_name == 'linear_decay_with_warmup':
        return LinearWithWarmupScheduler(t_warmup=cfg.t_warmup,
                                         alpha_f=cfg.alpha_f)
    raise ValueError(f'Not sure how to build scheduler: {sched_name}')
def build_dataloader(cfg, device_batch_size):
    """Build a dataloader for ``cfg`` at the given per-device batch size.

    Only ``cfg.name == 'text'`` is supported, delegating to
    ``build_text_dataloader``; anything else raises ValueError.
    """
    if cfg.name != 'text':
        raise ValueError(f'Not sure how to build dataloader with config: {cfg}')
    return build_text_dataloader(cfg, device_batch_size)