@@ -298,7 +298,7 @@ def test_adanorm_optimizer(optimizer_config, environment):


 @pytest.mark.parametrize('optimizer_config', ADANORM_SUPPORTED_OPTIMIZERS, ids=ids)
-def test_adanorm_condition(optimizer_config):
+def test_adanorm_variant(optimizer_config):
     param = simple_parameter(True)
     param.grad = torch.ones(1, 1)

@@ -312,7 +312,7 @@ def test_adanorm_condition(optimizer_config):


 @pytest.mark.parametrize('optimizer_config', ADAMD_SUPPORTED_OPTIMIZERS, ids=ids)
-def test_adamd_optimizers(optimizer_config, environment):
+def test_adamd_variant(optimizer_config, environment):
     (x_data, y_data), model, loss_fn = environment

     optimizer_class, config, num_iterations = optimizer_config
@@ -337,7 +337,7 @@ def test_adamd_optimizers(optimizer_config, environment):


 @pytest.mark.parametrize('optimizer_config', COPT_SUPPORTED_OPTIMIZERS, ids=ids)
-def test_copt_optimizers(optimizer_config, environment):
+def test_cautious_variant(optimizer_config, environment):
     (x_data, y_data), model, loss_fn = environment

     optimizer_class, config, num_iterations = optimizer_config
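For context, all three renamed tests share the same parametrized shape: each entry in a `*_SUPPORTED_OPTIMIZERS` list is an `(optimizer_class, config, num_iterations)` tuple, expanded by `@pytest.mark.parametrize` with human-readable `ids`, and driven against a shared `environment` fixture. Below is a minimal, self-contained sketch of that pattern; the fixture body, the example optimizer list, and the final loss assertion are assumptions for illustration, not the repository's actual fixtures or registry.

```python
import pytest
import torch
from torch import nn

# Hypothetical stand-in for a *_SUPPORTED_OPTIMIZERS registry; the real lists
# hold (optimizer_class, config, num_iterations) tuples, as seen in the diff.
EXAMPLE_OPTIMIZERS = [
    (torch.optim.Adam, {'lr': 1e-1}, 50),
    (torch.optim.SGD, {'lr': 1e-1}, 100),
]

# Readable test ids, one per parametrized case.
ids = [f'{cls.__name__}_{i}' for i, (cls, _, _) in enumerate(EXAMPLE_OPTIMIZERS)]


@pytest.fixture
def environment():
    # Assumed shape of the shared fixture: a toy dataset, model, and loss.
    x_data = torch.randn(64, 2)
    y_data = (x_data.sum(dim=1, keepdim=True) > 0).float()
    model = nn.Sequential(nn.Linear(2, 8), nn.ReLU(), nn.Linear(8, 1))
    return (x_data, y_data), model, nn.BCEWithLogitsLoss()


@pytest.mark.parametrize('optimizer_config', EXAMPLE_OPTIMIZERS, ids=ids)
def test_variant(optimizer_config, environment):
    (x_data, y_data), model, loss_fn = environment
    optimizer_class, config, num_iterations = optimizer_config
    optimizer = optimizer_class(model.parameters(), **config)

    init_loss = loss_fn(model(x_data), y_data).item()
    for _ in range(num_iterations):
        optimizer.zero_grad()
        loss = loss_fn(model(x_data), y_data)
        loss.backward()
        optimizer.step()

    # Assumed success criterion: training reduced the loss.
    assert loss.item() < init_loss
```

The renames themselves are cosmetic but consistent: each test now ends in `_variant`, which keeps the `ids`-generated case names aligned across the adanorm, AdamD, and cautious parametrizations.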