
Commit acb09dc

fix: __str__ method

1 parent edee372 · commit acb09dc
28 files changed: +31 additions, −66 deletions
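The bug: each optimizer's `__str__` was decorated with `@property`, so `self.__str__` evaluated to the name string itself rather than a bound method. That is why the old `NoSparseGradientError(self.__str__)` happened to work, while `str(optimizer)` and `print(optimizer)` raised `TypeError`: `str()` resolves `__str__` on the type and then tries to call the property's computed value. A minimal, self-contained sketch of the failure and the fix (standalone illustration, not code from this repository):

# Minimal sketch of the failure mode; standalone, not repository code.

class BrokenStr:
    @property
    def __str__(self) -> str:  # before this commit: a property, not a method
        return 'AdaBelief'

class FixedStr:
    def __str__(self) -> str:  # after this commit: a plain method
        return 'AdaBelief'

broken = BrokenStr()
print(broken.__str__)  # 'AdaBelief' -- the property yields the string directly,
                       # which is why NoSparseGradientError(self.__str__) "worked"
try:
    str(broken)        # str() looks up __str__ on the type, then calls the result
except TypeError as e:
    print(e)           # 'str' object is not callable

fixed = FixedStr()
print(str(fixed))      # 'AdaBelief' -- the idiomatic spelling used by the fix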

pytorch_optimizer/optimizer/adabelief.py (1 addition, 2 deletions)

@@ -70,7 +70,6 @@ def validate_parameters(self):
         self.validate_weight_decay(self.weight_decay)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AdaBelief'
 
@@ -106,7 +105,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
                 if len(state) == 0:

pytorch_optimizer/optimizer/adabound.py (1 addition, 2 deletions)

@@ -68,7 +68,6 @@ def validate_parameters(self):
         self.validate_weight_decay(self.weight_decay)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AdaBound'
 
@@ -99,7 +98,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
 
pytorch_optimizer/optimizer/adafactor.py (1 addition, 2 deletions)

@@ -63,7 +63,6 @@ def validate_parameters(self):
         self.validate_epsilon(self.eps1)
         self.validate_epsilon(self.eps2)
 
-    @property
     def __str__(self) -> str:
         return 'AdaFactor'
 
@@ -134,7 +133,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]

pytorch_optimizer/optimizer/adai.py (1 addition, 2 deletions)

@@ -61,7 +61,6 @@ def validate_parameters(self):
         self.validate_weight_decay(self.weight_decay)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'Adai'
 
@@ -94,7 +93,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 param_size += p.numel()

pytorch_optimizer/optimizer/adamp.py (1 addition, 2 deletions)

@@ -67,7 +67,6 @@ def validate_parameters(self):
         self.validate_weight_decay_ratio(self.wd_ratio)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AdamP'
 
@@ -96,7 +95,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
                 if len(state) == 0:

pytorch_optimizer/optimizer/adams.py (1 addition, 2 deletions)

@@ -48,7 +48,6 @@ def validate_parameters(self):
         self.validate_weight_decay(self.weight_decay)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AdamS'
 
@@ -80,7 +79,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 param_size += p.numel()

pytorch_optimizer/optimizer/adan.py (1 addition, 2 deletions)

@@ -61,7 +61,6 @@ def validate_parameters(self):
         self.validate_epsilon(self.eps)
         self.validate_norm(self.max_grad_norm)
 
-    @property
     def __str__(self) -> str:
         return 'Adan'
 
@@ -122,7 +121,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
                 if len(state) == 0:

pytorch_optimizer/optimizer/adapnm.py (1 addition, 2 deletions)

@@ -59,7 +59,6 @@ def validate_parameters(self):
         self.validate_weight_decay(self.weight_decay)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AdaPNM'
 
@@ -92,7 +91,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 if group['weight_decouple']:
                     p.mul_(1.0 - group['lr'] * group['weight_decay'])

pytorch_optimizer/optimizer/alig.py (1 addition, 2 deletions)

@@ -46,7 +46,6 @@ def validate_parameters(self):
         self.validate_momentum(self.momentum)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'AliG'
 
@@ -91,7 +90,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
                 if len(state) == 0 and momentum > 0.0:

pytorch_optimizer/optimizer/apollo.py (1 addition, 2 deletions)

@@ -64,7 +64,6 @@ def validate_parameters(self):
         self.validate_weight_decay_type(self.weight_decay_type)
         self.validate_epsilon(self.eps)
 
-    @property
     def __str__(self) -> str:
         return 'Apollo'
 
@@ -110,7 +109,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 grad = p.grad
                 if grad.is_sparse:
-                    raise NoSparseGradientError(self.__str__)
+                    raise NoSparseGradientError(str(self))
 
                 state = self.state[p]
                 if len(state) == 0:
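With the decorator gone, `self.__str__` is a bound method again, so keeping the old argument would have leaked a method repr into the error message; every raise site therefore switches to `str(self)`. A short sketch of the difference (the exception below is a stand-in that assumes only what the call sites above show, namely that `NoSparseGradientError` takes the optimizer name as its first argument; the real class is defined elsewhere in the package):

# Stand-in exception mirroring the call shape of NoSparseGradientError;
# the message text here is illustrative, not the package's actual wording.
class NoSparseGradientError(Exception):
    def __init__(self, optimizer_name: str):
        super().__init__(f'{optimizer_name} does not support sparse gradients')

class AdaBelief:
    def __str__(self) -> str:  # plain method, as after this commit
        return 'AdaBelief'

opt = AdaBelief()
print(NoSparseGradientError(opt.__str__))  # <bound method AdaBelief.__str__ of ...> ...
print(NoSparseGradientError(str(opt)))     # AdaBelief does not support sparse gradients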
