Commit 3178f10

refactor: ZeroParameterSizeError
1 parent 412773c

File tree

4 files changed: 8 additions, 8 deletions
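The diffs below only touch call sites: the exception class itself lives in pytorch_optimizer/base/exception.py, which is not part of this commit. For context, a minimal sketch of what the renamed class might look like; only the class name ZeroParameterSizeError is confirmed by this commit, the body, docstring, and message are assumptions:

# Hypothetical sketch of pytorch_optimizer/base/exception.py (not in this diff).
class ZeroParameterSizeError(Exception):
    """Raised when the total size of the trainable parameters is zero."""

    def __init__(self):
        self.message: str = '[-] parameter size is 0'
        super().__init__(self.message)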

pytorch_optimizer/optimizer/adai.py

Lines changed: 2 additions & 2 deletions

@@ -4,7 +4,7 @@
 from torch.optim.optimizer import Optimizer

 from pytorch_optimizer.base.base_optimizer import BaseOptimizer
-from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSize
+from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSizeError
 from pytorch_optimizer.base.types import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
 from pytorch_optimizer.optimizer.gc import centralize_gradient

@@ -124,7 +124,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
                 exp_avg_sq_hat_sum += exp_avg_sq.sum() / bias_correction2

         if param_size == 0:
-            raise ZeroParameterSize()
+            raise ZeroParameterSizeError()

         exp_avg_sq_hat_mean = exp_avg_sq_hat_sum / param_size

pytorch_optimizer/optimizer/ranger21.py

Lines changed: 2 additions & 2 deletions

@@ -6,7 +6,7 @@
 from torch.optim import Optimizer

 from pytorch_optimizer.base.base_optimizer import BaseOptimizer
-from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSize
+from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSizeError
 from pytorch_optimizer.base.types import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
 from pytorch_optimizer.optimizer.agc import agc
 from pytorch_optimizer.optimizer.gc import centralize_gradient
@@ -227,7 +227,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:

         # stable weight decay
         if param_size == 0:
-            raise ZeroParameterSize()
+            raise ZeroParameterSizeError()

         variance_normalized = math.sqrt(variance_ma_sum / param_size)
         if math.isnan(variance_normalized):

tests/test_optimizer_parameters.py

Lines changed: 2 additions & 2 deletions

@@ -5,7 +5,7 @@
 from torch import nn

 from pytorch_optimizer import SAM, Lookahead, PCGrad, Ranger21, SafeFP16Optimizer, load_optimizer
-from pytorch_optimizer.base.exception import ZeroParameterSize
+from pytorch_optimizer.base.exception import ZeroParameterSizeError
 from tests.utils import Example

 OPTIMIZER_NAMES: List[str] = [
@@ -219,7 +219,7 @@ def test_size_of_parameter(optimizer):
     model: nn.Module = nn.Linear(1, 1, bias=False)
     model.requires_grad_(False)

-    with pytest.raises(ZeroParameterSize):
+    with pytest.raises(ZeroParameterSizeError):
         load_optimizer(optimizer)(model.parameters(), 100).step()


tests/test_optimizers.py

Lines changed: 2 additions & 2 deletions

@@ -30,7 +30,7 @@
     SafeFP16Optimizer,
     Shampoo,
 )
-from pytorch_optimizer.base.exception import ZeroParameterSize
+from pytorch_optimizer.base.exception import ZeroParameterSizeError
 from tests.utils import (
     MultiHeadLogisticRegression,
     build_environment,
@@ -361,7 +361,7 @@ def test_closure(optimizer):

     try:
         optimizer.step(closure=dummy_closure)
-    except ZeroParameterSize:  # in case of Ranger21, Adai optimizers
+    except ZeroParameterSizeError:  # in case of Ranger21, Adai optimizers
         pass

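Taken together, the diffs show the intended behavior: an optimizer whose parameters contribute no trainable size raises the error from step(). A standalone sketch reproducing the path exercised by the tests above; the tests pass Ranger21's required iteration count positionally as 100, and the keyword name num_iterations used here is an assumption:

# Minimal repro sketch of the error path covered by the tests above.
from torch import nn

from pytorch_optimizer import Ranger21
from pytorch_optimizer.base.exception import ZeroParameterSizeError

model = nn.Linear(1, 1, bias=False)
model.requires_grad_(False)  # freeze everything, so param_size stays 0

optimizer = Ranger21(model.parameters(), num_iterations=100)  # keyword name assumed
try:
    optimizer.step()
except ZeroParameterSizeError:
    print('no trainable parameters: ZeroParameterSizeError raised as expected')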
