Commit eb88fe6

flake8 fix
1 parent 59b0a1d commit eb88fe6

File tree

2 files changed: +17 -16 lines changed


examples/xform-smiles/srt_baseline_keras.py

Lines changed: 11 additions & 11 deletions
@@ -55,15 +55,15 @@ def run(params):
     optimizer = optimizers.deserialize({'class_name': params['optimizer'], 'config': {}})
 
     # I don't know why we set base_lr. It doesn't appear to be used.
-    if 'base_lr' in params and params['base_lr'] > 0:
-        base_lr = params['base_lr']
-    else:
-        base_lr = K.get_value(optimizer.lr)
+    # if 'base_lr' in params and params['base_lr'] > 0:
+    #     base_lr = params['base_lr']
+    # else:
+    #     base_lr = K.get_value(optimizer.lr)
 
     if 'learning_rate' in params and params['learning_rate'] > 0:
         K.set_value(optimizer.lr, params['learning_rate'])
         print('Done setting optimizer {} learning rate to {}'.format(
-            params['optimizer'],params['learning_rate']))
+            params['optimizer'], params['learning_rate']))
 
     model.compile(loss='mean_squared_error',
                   optimizer=optimizer,
@@ -76,12 +76,12 @@ def run(params):
     reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.75, patience=20, verbose=1, mode='auto', epsilon=0.0001, cooldown=3, min_lr=0.000000001)
     early_stop = EarlyStopping(monitor='val_loss', patience=100, verbose=1, mode='auto')
 
-    history = model.fit(x_train, y_train,
-                        batch_size=params['batch_size'],
-                        epochs=params['epochs'],
-                        verbose=1,
-                        validation_data=(x_val, y_val),
-                        callbacks=[checkpointer, csv_logger, reduce_lr, early_stop])
+    model.fit(x_train, y_train,
+              batch_size=params['batch_size'],
+              epochs=params['epochs'],
+              verbose=1,
+              validation_data=(x_val, y_val),
+              callbacks=[checkpointer, csv_logger, reduce_lr, early_stop])
 
     model.load_weights('smile_regress.autosave.model.h5')
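For context, the edits in this file line up with standard flake8 findings: F841 ("local variable is assigned to but never used") for the dead base_lr block and for the history assignment (this script never reads fit()'s return value), and E231 ("missing whitespace after ','") in the print call. The sketch below is a minimal, hypothetical file, not from this repo, that reproduces the same warning classes, including the E302 fixed in srt_baseline_keras2.py further down:

# flake8_demo.py -- hypothetical example; running `flake8 flake8_demo.py`
# reports the same rule classes this commit addresses.
def run(params):
    base_lr = params['base_lr']                # F841: assigned, never used
    print('lr set for {}'.format(params['optimizer']))
def main():                                    # E302: expected 2 blank lines
    run({'base_lr': 0.001,'optimizer': 'sgd'})  # E231: no space after ','


main()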

examples/xform-smiles/srt_baseline_keras2.py

Lines changed: 6 additions & 5 deletions
@@ -55,15 +55,15 @@ def run(params):
     optimizer = optimizers.deserialize({'class_name': params['optimizer'], 'config': {}})
 
     # I don't know why we set base_lr. It doesn't appear to be used.
-    if 'base_lr' in params and params['base_lr'] > 0:
-        base_lr = params['base_lr']
-    else:
-        base_lr = K.get_value(optimizer.lr)
+    # if 'base_lr' in params and params['base_lr'] > 0:
+    #     base_lr = params['base_lr']
+    # else:
+    #     base_lr = K.get_value(optimizer.lr)
 
     if 'learning_rate' in params and params['learning_rate'] > 0:
         K.set_value(optimizer.lr, params['learning_rate'])
         print('Done setting optimizer {} learning rate to {}'.format(
-            params['optimizer'],params['learning_rate']))
+            params['optimizer'], params['learning_rate']))
 
     model.compile(loss='mean_squared_error',
                   optimizer=optimizer,
@@ -87,6 +87,7 @@ def run(params):
 
     return history
 
+
 def main():
     params = initialize_parameters()
     run(params)
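Unlike the first file, the history = model.fit(...) assignment is kept here because run() returns history; the only addition beyond the shared cleanup is the extra blank line before def main(), satisfying pycodestyle's E302 (two blank lines before a top-level def). The learning-rate override that survives in both scripts mutates the deserialized optimizer in place through the Keras backend. A standalone sketch of that pattern, assuming the Keras 2-era API these scripts target and hypothetical parameter values:

from keras import backend as K
from keras import optimizers

params = {'optimizer': 'adam', 'learning_rate': 0.001}  # hypothetical values

# Build the optimizer from its name with default config, as the scripts do.
optimizer = optimizers.deserialize({'class_name': params['optimizer'],
                                    'config': {}})

if 'learning_rate' in params and params['learning_rate'] > 0:
    # optimizer.lr is a backend variable, so it can be overwritten in place
    # without rebuilding the optimizer.
    K.set_value(optimizer.lr, params['learning_rate'])
    print('Done setting optimizer {} learning rate to {}'.format(
        params['optimizer'], params['learning_rate']))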
