# import matplotlib.pyplot as plt

from tensorflow.keras import backend as K
-from tensorflow.keras.optimizers import Adam  # RMSprop, SGD
+import tensorflow.keras.optimizers as optimizers

from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, ReduceLROnPlateau, EarlyStopping

file_path = os.path.dirname(os.path.realpath(__file__))

import candle
import smiles_transformer as st

+import tensorflow.config.experimental
+gpus = tensorflow.config.experimental.list_physical_devices('GPU')
+try:
+    for gpu in gpus:
+        print("setting memory growth")
+        tensorflow.config.experimental.set_memory_growth(gpu, True)
+except RuntimeError as e:
+    print(e)
+
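Note on the added block: it enables GPU memory growth, so TensorFlow allocates device memory on demand instead of reserving it all at startup. set_memory_growth must be called before the GPU is first initialized, which is why the RuntimeError is caught and printed. A minimal standalone sketch of the same idea, assuming TensorFlow 2.x:

    import tensorflow as tf

    # Must run before any op touches the GPU; afterwards it raises RuntimeError.
    for gpu in tf.config.experimental.list_physical_devices('GPU'):
        tf.config.experimental.set_memory_growth(gpu, True)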

def initialize_parameters(default_model='regress_default_model.txt'):
@@ -43,8 +52,21 @@ def run(params):

    model = st.transformer_model(params)

+    optimizer = optimizers.deserialize({'class_name': params['optimizer'], 'config': {}})
+
+    # I don't know why we set base_lr. It doesn't appear to be used.
+    if 'base_lr' in params and params['base_lr'] > 0:
+        base_lr = params['base_lr']
+    else:
+        base_lr = K.get_value(optimizer.lr)
+
+    if 'learning_rate' in params and params['learning_rate'] > 0:
+        K.set_value(optimizer.lr, params['learning_rate'])
+        print('Done setting optimizer {} learning rate to {}'.format(
+            params['optimizer'], params['learning_rate']))
+
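Note on the added block: optimizers.deserialize instantiates an optimizer from its registered class name with a default config, and K.get_value / K.set_value read and overwrite its learning-rate variable, replacing the hard-coded Adam(lr=0.00001) below. A small sketch of that mechanism, assuming 'Adam' is what arrives in params['optimizer'] (TF 2.x Keras):

    from tensorflow.keras import backend as K
    import tensorflow.keras.optimizers as optimizers

    opt = optimizers.deserialize({'class_name': 'Adam', 'config': {}})
    print(type(opt).__name__, K.get_value(opt.lr))  # Adam 0.001 (its default)
    K.set_value(opt.lr, 1e-5)                       # the same override run() applies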
    model.compile(loss='mean_squared_error',
-                 optimizer=Adam(lr=0.00001),
+                 optimizer=optimizer,
                  metrics=['mae', st.r2])

    # set up a bunch of callbacks to do work during model training.
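The callbacks imported at the top of the file would typically be wired up along these lines; the file names, monitored metric, patience values, and data variables here are illustrative placeholders, not the values this script actually uses:

    from tensorflow.keras.callbacks import (ModelCheckpoint, CSVLogger,
                                            ReduceLROnPlateau, EarlyStopping)

    callbacks = [
        ModelCheckpoint('model.h5', monitor='val_loss', save_best_only=True),
        CSVLogger('training.log'),
        ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=10),
        EarlyStopping(monitor='val_loss', patience=50),
    ]
    model.fit(x_train, y_train, validation_data=(x_val, y_val),
              epochs=100, callbacks=callbacks)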