
Commit f520e5f

update: remove marker
1 parent 26723e7 commit f520e5f

9 files changed: 0 additions and 78 deletions
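Note: the marks stripped throughout this commit (utils, gradient, lr_scheduler, cfg) are custom pytest marks. For context, a minimal sketch of how such marks are typically registered so that `pytest -m <mark>` can select them is shown below; the conftest.py hook is a standard pytest mechanism, but the registration and mark descriptions here are assumptions for illustration, not code from this repository. With the decorators removed, this registration and `-m`-based selection are no longer needed.

def pytest_configure(config):
    # Hypothetical conftest.py sketch: register the custom marks so that
    # `pytest -m gradient` (or `-m "utils or cfg"`) can filter the suite
    # without "unknown mark" warnings. The mark names mirror the decorators
    # removed in this commit; the descriptions are assumptions.
    for name, description in (
        ('utils', 'loader / utility helper tests'),
        ('gradient', 'gradient-handling tests'),
        ('lr_scheduler', 'learning-rate scheduler tests'),
        ('cfg', 'optimizer configuration validation tests'),
    ):
        config.addinivalue_line('markers', f'{name}: {description}')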

tests/test_create_optimizer.py

Lines changed: 0 additions & 3 deletions
@@ -1,10 +1,7 @@
-import pytest
-
 from pytorch_optimizer import create_optimizer
 from tests.utils import LogisticRegression
 
 
-@pytest.mark.utils
 def test_create_optimizer():
     model = LogisticRegression()
 
tests/test_gradients.py

Lines changed: 0 additions & 5 deletions
@@ -7,7 +7,6 @@
 from tests.utils import build_environment, simple_parameter, simple_sparse_parameter
 
 
-@pytest.mark.gradient
 @pytest.mark.parametrize('optimizer_name', [*VALID_OPTIMIZER_NAMES, 'lookahead'])
 def test_no_gradients(optimizer_name):
     p1 = simple_parameter(require_grad=True)
@@ -33,7 +32,6 @@ def test_no_gradients(optimizer_name):
     optimizer.step(lambda: 0.1)  # for AliG optimizer
 
 
-@pytest.mark.gradient
 @pytest.mark.parametrize('no_sparse_optimizer', NO_SPARSE_OPTIMIZERS)
 def test_sparse_not_supported(no_sparse_optimizer):
     param = simple_sparse_parameter()
@@ -47,7 +45,6 @@ def test_sparse_not_supported(no_sparse_optimizer):
     optimizer.step(lambda: 0.1)
 
 
-@pytest.mark.gradient
 @pytest.mark.parametrize('sparse_optimizer', SPARSE_OPTIMIZERS)
 def test_sparse_supported(sparse_optimizer):
     param = simple_sparse_parameter()
@@ -81,7 +78,6 @@ def test_sparse_supported(sparse_optimizer):
     optimizer.step()
 
 
-@pytest.mark.gradient
 @pytest.mark.parametrize('optimizer_name', VALID_OPTIMIZER_NAMES)
 def test_bf16_gradient(optimizer_name):
     # torch.eye does not support bf16
@@ -96,7 +92,6 @@ def test_bf16_gradient(optimizer_name):
     optimizer.step(lambda: 0.1)
 
 
-@pytest.mark.gradient
 def test_sam_no_gradient():
     (x_data, y_data), model, loss_fn = build_environment()
     model.fc1.weight.requires_grad = False

tests/test_load_lr_schedulers.py

Lines changed: 0 additions & 3 deletions
@@ -4,19 +4,16 @@
 from tests.constants import INVALID_LR_SCHEDULER_NAMES, VALID_LR_SCHEDULER_NAMES
 
 
-@pytest.mark.utils
 @pytest.mark.parametrize('valid_lr_scheduler_names', VALID_LR_SCHEDULER_NAMES)
 def test_load_optimizers_valid(valid_lr_scheduler_names):
     load_lr_scheduler(valid_lr_scheduler_names)
 
 
-@pytest.mark.utils
 @pytest.mark.parametrize('invalid_lr_scheduler_names', INVALID_LR_SCHEDULER_NAMES)
 def test_load_optimizers_invalid(invalid_lr_scheduler_names):
     with pytest.raises(NotImplementedError):
         load_lr_scheduler(invalid_lr_scheduler_names)
 
 
-@pytest.mark.utils
 def test_get_supported_lr_schedulers():
     assert len(get_supported_lr_schedulers()) == 10

tests/test_load_optimizers.py

Lines changed: 0 additions & 3 deletions
@@ -4,19 +4,16 @@
 from tests.constants import INVALID_OPTIMIZER_NAMES, VALID_OPTIMIZER_NAMES
 
 
-@pytest.mark.utils
 @pytest.mark.parametrize('valid_optimizer_names', VALID_OPTIMIZER_NAMES)
 def test_load_optimizers_valid(valid_optimizer_names):
     load_optimizer(valid_optimizer_names)
 
 
-@pytest.mark.utils
 @pytest.mark.parametrize('invalid_optimizer_names', INVALID_OPTIMIZER_NAMES)
 def test_load_optimizers_invalid(invalid_optimizer_names):
     with pytest.raises(NotImplementedError):
         load_optimizer(invalid_optimizer_names)
 
 
-@pytest.mark.utils
 def test_get_supported_optimizers():
     assert len(get_supported_optimizers()) == 29

tests/test_lr_scheduler_parameters.py

Lines changed: 0 additions & 3 deletions
@@ -8,7 +8,6 @@
 from tests.utils import Example
 
 
-@pytest.mark.lr_scheduler
 def test_cosine_annealing_warmup_restarts_params():
     optimizer = AdamP(Example().parameters())
 
@@ -37,7 +36,6 @@ def test_cosine_annealing_warmup_restarts_params():
     lr_scheduler.step(epoch=None)
 
 
-@pytest.mark.lr_scheduler
 def test_linear_warmup_lr_scheduler_params():
     optimizer = AdamP(Example().parameters())
 
@@ -62,7 +60,6 @@ def test_linear_warmup_lr_scheduler_params():
     PolyScheduler(optimizer=optimizer, t_max=1, max_lr=1, min_lr=1, init_lr=1, warmup_steps=-1)
 
 
-@pytest.mark.lr_scheduler
 def test_chebyshev_params():
     with pytest.raises(IndexError):
         get_chebyshev_schedule(2)

tests/test_lr_schedulers.py

Lines changed: 0 additions & 8 deletions
@@ -114,7 +114,6 @@
 PROPORTION_LEARNING_RATES = [(1e-1, 1e-1, 2.0), (1e-1, 1e-3, 1.090909)]
 
 
-@pytest.mark.lr_scheduler
 @pytest.mark.parametrize('cosine_annealing_warmup_restart_param', CAWR_RECIPES)
 def test_cosine_annealing_warmup_restarts(cosine_annealing_warmup_restart_param):
     model = Example()
@@ -151,13 +150,11 @@ def test_cosine_annealing_warmup_restarts(cosine_annealing_warmup_restart_param):
     np.testing.assert_almost_equal(expected_lrs[epoch], lr)
 
 
-@pytest.mark.lr_scheduler
 def test_get_chebyshev_scheduler():
     np.testing.assert_almost_equal(get_chebyshev_schedule(3), 1.81818182, decimal=6)
     np.testing.assert_array_equal(chebyshev_perm(5), np.asarray([0, 7, 3, 4, 1, 6, 2, 5]))
 
 
-@pytest.mark.lr_scheduler
 def test_linear_warmup_linear_scheduler():
     optimizer = AdamP(Example().parameters())
     lr_scheduler = LinearScheduler(optimizer, t_max=10, max_lr=1e-2, min_lr=1e-4, init_lr=1e-3, warmup_steps=5)
@@ -167,7 +164,6 @@ def test_linear_warmup_linear_scheduler():
     np.testing.assert_almost_equal(expected_lr, lr_scheduler.get_lr())
 
 
-@pytest.mark.lr_scheduler
 def test_linear_warmup_cosine_scheduler():
     optimizer = AdamP(Example().parameters())
     lr_scheduler = CosineScheduler(optimizer, t_max=10, max_lr=1e-2, min_lr=1e-4, init_lr=1e-3, warmup_steps=5)
@@ -177,7 +173,6 @@ def test_linear_warmup_cosine_scheduler():
     np.testing.assert_almost_equal(expected_lr, lr_scheduler.get_lr(), 5)
 
 
-@pytest.mark.lr_scheduler
 def test_linear_warmup_poly_scheduler():
     optimizer = AdamP(Example().parameters())
     lr_scheduler = PolyScheduler(optimizer=optimizer, t_max=10, max_lr=1e-2, min_lr=1e-4, init_lr=1e-3, warmup_steps=5)
@@ -187,7 +182,6 @@ def test_linear_warmup_poly_scheduler():
     np.testing.assert_almost_equal(expected_lr, lr_scheduler.get_lr(), 6)
 
 
-@pytest.mark.lr_scheduler
 @pytest.mark.parametrize('proportion_learning_rate', PROPORTION_LEARNING_RATES)
 def test_proportion_scheduler(proportion_learning_rate: Tuple[float, float, float]):
     base_optimizer = AdamP(Example().parameters())
@@ -207,7 +201,6 @@ def test_proportion_scheduler(proportion_learning_rate: Tuple[float, float, float]):
     np.testing.assert_almost_equal(proportion_learning_rate[2], rho_scheduler.get_lr(), 6)
 
 
-@pytest.mark.lr_scheduler
 def test_proportion_no_last_lr_scheduler():
     base_optimizer = AdamP(Example().parameters())
     lr_scheduler = CosineAnnealingWarmupRestarts(
@@ -229,7 +222,6 @@ def test_proportion_no_last_lr_scheduler():
     np.testing.assert_almost_equal(2.0, rho_scheduler.get_lr(), 6)
 
 
-@pytest.mark.lr_scheduler
 def test_deberta_v3_large_lr_scheduler():
     try:
         from transformers import AutoConfig, AutoModel

tests/test_optimizer_parameters.py

Lines changed: 0 additions & 26 deletions
@@ -8,7 +8,6 @@
 from tests.utils import Example, simple_parameter
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', VALID_OPTIMIZER_NAMES)
 def test_learning_rate(optimizer_name):
     if optimizer_name in ('alig',):
@@ -24,7 +23,6 @@ def test_learning_rate(optimizer_name):
     optimizer(None, **config)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', VALID_OPTIMIZER_NAMES)
 def test_epsilon(optimizer_name):
     if optimizer_name in ('nero', 'shampoo', 'scalableshampoo', 'dadaptsgd', 'adafactor', 'lion'):
@@ -42,7 +40,6 @@ def test_epsilon(optimizer_name):
     assert str(error_info.value) == '[-] epsilon -1e-06 must be non-negative'
 
 
-@pytest.mark.cfg
 def test_shampoo_epsilon():
     shampoo = load_optimizer('Shampoo')
     scalable_shampoo = load_optimizer('ScalableShampoo')
@@ -57,7 +54,6 @@ def test_shampoo_epsilon():
     shampoo(None, matrix_eps=-1e-6)
 
 
-@pytest.mark.cfg
 def test_adafactor_epsilon():
     adafactor = load_optimizer('adafactor')
 
@@ -68,7 +64,6 @@ def test_adafactor_epsilon():
     adafactor(None, eps2=-1e-6)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', VALID_OPTIMIZER_NAMES)
 def test_weight_decay(optimizer_name):
     if optimizer_name in ('nero', 'alig'):
@@ -86,7 +81,6 @@ def test_weight_decay(optimizer_name):
     assert str(error_info.value) == '[-] weight_decay -0.001 must be non-negative'
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['apollo'])
 def test_weight_decay_type(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
@@ -95,7 +89,6 @@ def test_weight_decay_type(optimizer_name):
     optimizer(None, weight_decay_type='dummy')
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['apollo'])
 def test_rebound(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
@@ -104,55 +97,48 @@ def test_rebound(optimizer_name):
     optimizer(None, rebound='dummy')
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['adamp', 'sgdp'])
 def test_wd_ratio(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, wd_ratio=-1e-3)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['lars'])
 def test_trust_coefficient(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, trust_coefficient=-1e-3)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['madgrad', 'lars'])
 def test_momentum(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, momentum=-1e-3)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['ranger'])
 def test_lookahead_k(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, k=-1)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['ranger21'])
 def test_beta0(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, num_iterations=200, beta0=-0.1)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['nero', 'apollo'])
 def test_beta(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, beta=-0.1)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', BETA_OPTIMIZER_NAMES)
 def test_betas(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
@@ -174,7 +160,6 @@ def test_betas(optimizer_name):
     optimizer(None, betas=(0.1, 0.1, -0.1))
 
 
-@pytest.mark.cfg
 def test_reduction():
     parameters = Example().parameters()
     optimizer = load_optimizer('adamp')(parameters)
@@ -183,7 +168,6 @@ def test_reduction():
     PCGrad(optimizer, reduction='wrong')
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['scalableshampoo', 'shampoo'])
 def test_update_frequency(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
@@ -199,21 +183,18 @@ def test_update_frequency(optimizer_name):
     optimizer(None, preconditioning_compute_steps=-1)
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer_name', ['adan', 'lamb'])
 def test_norm(optimizer_name):
     optimizer = load_optimizer(optimizer_name)
     with pytest.raises(ValueError):
         optimizer(None, max_grad_norm=-0.1)
 
 
-@pytest.mark.cfg
 def test_sam_parameters():
     with pytest.raises(ValueError, match=''):
         SAM(None, load_optimizer('adamp'), rho=-0.1)
 
 
-@pytest.mark.cfg
 def test_lookahead_parameters():
     param = simple_parameter()
     optimizer = load_optimizer('adamp')([param])
@@ -235,7 +216,6 @@ def test_lookahead_parameters():
     Lookahead(optimizer, pullback_momentum='invalid')
 
 
-@pytest.mark.cfg
 def test_sam_methods():
     param = simple_parameter()
 
@@ -244,7 +224,6 @@ def test_sam_methods():
     optimizer.load_state_dict(optimizer.state_dict())
 
 
-@pytest.mark.cfg
 def test_safe_fp16_methods():
     param = simple_parameter()
 
@@ -265,14 +244,12 @@ def test_safe_fp16_methods():
     assert optimizer.loss_scale == 2.0 ** (15 - 1)
 
 
-@pytest.mark.cfg
 def test_ranger21_warm_methods():
     assert Ranger21.build_warm_up_iterations(1000, 0.999) == 220
     assert Ranger21.build_warm_up_iterations(4500, 0.999) == 2000
     assert Ranger21.build_warm_down_iterations(1000) == 280
 
 
-@pytest.mark.cfg
 @pytest.mark.parametrize('optimizer', ['ranger21', 'adai'])
 def test_size_of_parameter(optimizer):
     param = simple_parameter(require_grad=False)
@@ -282,7 +259,6 @@ def test_size_of_parameter(optimizer):
     load_optimizer(optimizer)([param], 1).step()
 
 
-@pytest.mark.cfg
 def test_ranger21_closure():
     model: nn.Module = Example()
     optimizer = load_optimizer('ranger21')(model.parameters(), num_iterations=100, betas=(0.9, 1e-9))
@@ -297,7 +273,6 @@ def closure():
     optimizer.step(closure)
 
 
-@pytest.mark.cfg
 def test_adafactor_reset():
     param = torch.zeros(1).requires_grad_(True)
     param.grad = torch.zeros(1)
@@ -306,7 +281,6 @@ def test_adafactor_reset():
     optimizer.reset()
 
 
-@pytest.mark.cfg
 def test_adafactor_get_lr():
     model: nn.Module = Example()
 
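With the marks removed from these files, subsets of the suite can no longer be selected with `pytest -m ...`; selection falls back to file paths or `-k` keyword expressions. A small sketch follows, where the path and keyword are examples only, not taken from this commit:

import pytest

# Hypothetical usage sketch: run the sparse-gradient tests by path and keyword
# instead of by mark; equivalent to `pytest -k sparse tests/test_gradients.py`.
if __name__ == '__main__':
    raise SystemExit(pytest.main(['-k', 'sparse', 'tests/test_gradients.py']))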
