@@ -55,15 +55,15 @@ def run(params):
     optimizer = optimizers.deserialize({'class_name': params['optimizer'], 'config': {}})
 
     # I don't know why we set base_lr. It doesn't appear to be used.
-    if 'base_lr' in params and params['base_lr'] > 0:
-        base_lr = params['base_lr']
-    else:
-        base_lr = K.get_value(optimizer.lr)
+    # if 'base_lr' in params and params['base_lr'] > 0:
+    #     base_lr = params['base_lr']
+    # else:
+    #     base_lr = K.get_value(optimizer.lr)
 
     if 'learning_rate' in params and params['learning_rate'] > 0:
         K.set_value(optimizer.lr, params['learning_rate'])
         print('Done setting optimizer {} learning rate to {}'.format(
-            params['optimizer'],params['learning_rate']))
+            params['optimizer'], params['learning_rate']))
 
     model.compile(loss='mean_squared_error',
                   optimizer=optimizer,
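The hunk above builds the optimizer from its serialized class name and then overrides the learning rate through the backend. A minimal runnable sketch of that pattern, assuming Keras 2.x-style imports (where optimizer.lr is still a valid alias for the learning-rate variable) and a hypothetical params dict; the real imports and params live outside this diff:

    from tensorflow.keras import optimizers, backend as K

    params = {'optimizer': 'Adam', 'learning_rate': 0.001}  # hypothetical stand-in

    # Instantiate the optimizer by class name with its default config.
    optimizer = optimizers.deserialize({'class_name': params['optimizer'], 'config': {}})

    # Override the default learning rate only when one is supplied.
    if 'learning_rate' in params and params['learning_rate'] > 0:
        K.set_value(optimizer.lr, params['learning_rate'])
        print('Done setting optimizer {} learning rate to {}'.format(
            params['optimizer'], params['learning_rate']))

This also shows why commenting out the base_lr block is safe: nothing after it reads base_lr, so removing the assignment changes no behavior.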
@@ -76,12 +76,12 @@ def run(params):
     reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.75, patience=20, verbose=1, mode='auto', epsilon=0.0001, cooldown=3, min_lr=0.000000001)
     early_stop = EarlyStopping(monitor='val_loss', patience=100, verbose=1, mode='auto')
 
-    history = model.fit(x_train, y_train,
-                        batch_size=params['batch_size'],
-                        epochs=params['epochs'],
-                        verbose=1,
-                        validation_data=(x_val, y_val),
-                        callbacks=[checkpointer, csv_logger, reduce_lr, early_stop])
+    model.fit(x_train, y_train,
+              batch_size=params['batch_size'],
+              epochs=params['epochs'],
+              verbose=1,
+              validation_data=(x_val, y_val),
+              callbacks=[checkpointer, csv_logger, reduce_lr, early_stop])
 
     model.load_weights('smile_regress.autosave.model.h5')
 
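For reference, a self-contained sketch of the train/checkpoint/reload pattern this hunk relies on, written against current tf.keras, where ReduceLROnPlateau's old epsilon argument is spelled min_delta. The tiny model, random data, and the checkpointer/csv_logger definitions are stand-ins, since the real ones sit outside the diff:

    import numpy as np
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense
    from tensorflow.keras.callbacks import (ModelCheckpoint, CSVLogger,
                                            ReduceLROnPlateau, EarlyStopping)

    # Stand-in regression data and model.
    x_train, y_train = np.random.rand(256, 10), np.random.rand(256, 1)
    x_val, y_val = np.random.rand(64, 10), np.random.rand(64, 1)
    model = Sequential([Dense(32, activation='relu', input_shape=(10,)), Dense(1)])
    model.compile(loss='mean_squared_error', optimizer='adam')

    # Autosave the best-so-far model, matching the filename reloaded below.
    checkpointer = ModelCheckpoint('smile_regress.autosave.model.h5',
                                   save_best_only=True, verbose=1)
    csv_logger = CSVLogger('smile_regress.training.log')
    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.75, patience=20,
                                  verbose=1, mode='auto', min_delta=0.0001,
                                  cooldown=3, min_lr=1e-9)
    early_stop = EarlyStopping(monitor='val_loss', patience=100, verbose=1,
                               mode='auto')

    # fit() returns a History object; the diff drops the unused binding
    # since nothing downstream reads it.
    model.fit(x_train, y_train, batch_size=32, epochs=3, verbose=1,
              validation_data=(x_val, y_val),
              callbacks=[checkpointer, csv_logger, reduce_lr, early_stop])

    # Reload the best autosaved weights so evaluation uses the best epoch,
    # not the last one.
    model.load_weights('smile_regress.autosave.model.h5')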