Skip to content

Commit ba8f637

Browse files
committed
update: test_dynamic_scaler
1 parent 8891787 commit ba8f637

File tree

1 file changed

+16
-1
lines changed

1 file changed

+16
-1
lines changed

tests/test_optimizers.py

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,16 @@
33
import torch
44
from torch import nn
55

6-
from pytorch_optimizer import GSAM, SAM, CosineScheduler, Lookahead, PCGrad, ProportionScheduler, load_optimizer
6+
from pytorch_optimizer import (
7+
GSAM,
8+
SAM,
9+
CosineScheduler,
10+
DynamicLossScaler,
11+
Lookahead,
12+
PCGrad,
13+
ProportionScheduler,
14+
load_optimizer,
15+
)
716
from pytorch_optimizer.base.exception import NoClosureError, ZeroParameterSizeError
817
from pytorch_optimizer.optimizer.utils import l2_projection
918
from tests.constants import (
@@ -479,3 +488,9 @@ def test_lomo_optimizer(precision, environment):
479488
loss = sphere_loss(next(iter(model.parameters())))
480489
optimizer.grad_norm(loss)
481490
optimizer.fused_backward(loss, lr=0.1)
491+
492+
493+
def test_dynamic_scaler():
    """Smoke-test DynamicLossScaler: scale can be decreased and then updated without overflow."""
    loss_scaler = DynamicLossScaler(init_scale=2.0 ** 15, scale_window=1, threshold=1e-2)

    # manually shrink the scale once, then run a normal (no-overflow) update step
    loss_scaler.decrease_loss_scale()
    loss_scaler.update_scale(overflow=False)

0 commit comments

Comments
 (0)