@@ -109,7 +109,7 @@ def test_f32_optimizers(optimizer_fp32_config):
109109
110110 optimizer_name: str = optimizer_class.__name__
111111 if optimizer_name == 'Nero' and 'constraints' not in config:
112- assert True
112+ pytest.skip(f'skip {optimizer_name} w/o constraints')
113113
114114 optimizer = optimizer_class(model.parameters(), **config)
115115
@@ -167,7 +167,7 @@ def test_safe_f16_optimizers(optimizer_fp16_config):
167167 or (optimizer_name == 'Nero')
168168 or (optimizer_name == 'Adan' and 'weight_decay' not in config)
169169 ):
170- assert True
170+ pytest.skip(f'skip {optimizer_name}')
171171
172172 optimizer = SafeFP16Optimizer(optimizer_class(model.parameters(), **config))
173173
@@ -195,7 +195,7 @@ def test_sam_optimizers(adaptive, optimizer_sam_config):
195195
196196 optimizer_class, config, iterations = optimizer_sam_config
197197 if optimizer_class.__name__ == 'Shampoo':
198- assert True
198+ pytest.skip(f'skip {optimizer_class.__name__}')
199199
200200 optimizer = SAM(model.parameters(), optimizer_class, **config, adaptive=adaptive)
201201
@@ -221,7 +221,7 @@ def test_sam_optimizers_with_closure(adaptive, optimizer_sam_config):
221221
222222 optimizer_class, config, iterations = optimizer_sam_config
223223 if optimizer_class.__name__ == 'Shampoo':
224- assert True
224+ pytest.skip(f'skip {optimizer_class.__name__}')
225225
226226 optimizer = SAM(model.parameters(), optimizer_class, **config, adaptive=adaptive)
227227
@@ -286,7 +286,7 @@ def test_pc_grad_optimizers(reduction, optimizer_pc_grad_config):
286286 optimizer = PCGrad(optimizer_class(model.parameters(), **config), reduction=reduction)
287287
288288 if optimizer_class.__name__ == 'RaLamb' and 'pre_norm' in config:
289- assert True
289+ pytest.skip(f'skip {optimizer_class.__name__} w/ pre_norm')
290290
291291 init_loss, loss = np.inf, np.inf
292292 for _ in range(iterations):
@@ -314,9 +314,8 @@ def test_no_gradients(optimizer_config):
314314 optimizer_class, config, iterations = optimizer_config
315315 optimizer = optimizer_class(model.parameters(), **config)
316316
317- optimizer_name: str = optimizer_class.__name__
318- if optimizer_name == 'Nero':
319- assert True
317+ if optimizer_class.__name__ == 'Nero':
318+ pytest.skip(f'skip {optimizer_class.__name__}')
320319
321320 init_loss, loss = np.inf, np.inf
322321 for _ in range(iterations):
@@ -341,7 +340,7 @@ def test_closure(optimizer_config):
341340
342341 optimizer_class, config, _ = optimizer_config
343342 if optimizer_class.__name__ == 'Ranger21':
344- assert True
343+ pytest.skip(f'skip {optimizer_class.__name__}')
345344
346345 optimizer = optimizer_class(model.parameters(), **config)
347346
0 commit comments