@@ -61,10 +61,10 @@ def create_optimizer(args, model, filter_bias_and_bn=True):
     opt_split = opt_lower.split('_')
     opt_lower = opt_split[-1]
     if opt_lower == 'sgd' or opt_lower == 'nesterov':
-        del opt_args['eps']
+        opt_args.pop('eps', None)
         optimizer = optim.SGD(parameters, momentum=args.momentum, nesterov=True, **opt_args)
     elif opt_lower == 'momentum':
-        del opt_args['eps']
+        opt_args.pop('eps', None)
         optimizer = optim.SGD(parameters, momentum=args.momentum, nesterov=False, **opt_args)
     elif opt_lower == 'adam':
         optimizer = optim.Adam(parameters, **opt_args)
@@ -95,10 +95,10 @@ def create_optimizer(args, model, filter_bias_and_bn=True):
     elif opt_lower == 'nvnovograd':
         optimizer = NvNovoGrad(parameters, **opt_args)
     elif opt_lower == 'fusedsgd':
-        del opt_args['eps']
+        opt_args.pop('eps', None)
         optimizer = FusedSGD(parameters, momentum=args.momentum, nesterov=True, **opt_args)
     elif opt_lower == 'fusedmomentum':
-        del opt_args['eps']
+        opt_args.pop('eps', None)
         optimizer = FusedSGD(parameters, momentum=args.momentum, nesterov=False, **opt_args)
     elif opt_lower == 'fusedadam':
         optimizer = FusedAdam(parameters, adam_w_mode=False, **opt_args)
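
The change replaces `del opt_args['eps']` with `dict.pop` and a default, so the SGD-family branches no longer raise `KeyError` when no `'eps'` entry was ever added to `opt_args` (e.g. when the eps argument is left unset and the key is never populated). A minimal standalone sketch of the difference; the `strip_eps_*` helpers and sample dicts are hypothetical illustrations, not part of the patch:

```python
# opt_args here stands in for the kwargs dict built by create_optimizer.

def strip_eps_del(opt_args):
    # Old behavior: raises KeyError if 'eps' was never added to opt_args.
    del opt_args['eps']
    return opt_args

def strip_eps_pop(opt_args):
    # New behavior: removes 'eps' if present, silently no-ops otherwise.
    opt_args.pop('eps', None)
    return opt_args

print(strip_eps_pop({'lr': 0.1, 'eps': 1e-8}))  # {'lr': 0.1} -- 'eps' dropped
print(strip_eps_pop({'lr': 0.1}))               # {'lr': 0.1} -- no error
try:
    strip_eps_del({'lr': 0.1})
except KeyError as e:
    print('del raises:', e)                     # del raises: 'eps'
```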