@@ -149,11 +149,13 @@ def print_all_variables(train_only=False):
     # tvar = tf.trainable_variables() if train_only else tf.all_variables()
     if train_only:
         t_vars = tf.trainable_variables()
+        print(" [*] printing trainable variables")
     else:
         try: # TF1.0
             t_vars = tf.global_variables()
         except: # TF0.12
             t_vars = tf.all_variables()
+        print(" [*] printing global variables")
     for idx, v in enumerate(t_vars):
         print(" var {:3}: {:15} {}".format(idx, str(v.get_shape()), v.name))
 
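The try/except above handles the TF 1.0 rename of `tf.all_variables()` to `tf.global_variables()`. A minimal standalone sketch of the same fallback, for reference; the helper name `_global_variables` is illustrative and not part of this patch:

```python
import tensorflow as tf

def _global_variables():
    # TF 1.0 renamed tf.all_variables() to tf.global_variables();
    # prefer the new name and fall back only when it is missing (TF 0.12).
    if hasattr(tf, 'global_variables'):
        return tf.global_variables()
    return tf.all_variables()

# List every variable's index, shape, and name, as print_all_variables() does.
for idx, v in enumerate(_global_variables()):
    print(" var {:3}: {:15} {}".format(idx, str(v.get_shape()), v.name))
```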
@@ -165,7 +167,7 @@ def get_variables_with_name(name, train_only=True, printable=False):
     ---------
     >>> dense_vars = get_variable_with_name('dense', True, True)
     """
-    print(" Get variables with %s" % name)
+    print(" [*] getting variables with %s" % name)
     # tvar = tf.trainable_variables() if train_only else tf.all_variables()
     if train_only:
         t_vars = tf.trainable_variables()
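The hunk ends before the filtering step, but the docstring example (`get_variable_with_name('dense', True, True)`) implies a substring match on variable names. A hedged, self-contained sketch of that idiom, not the verbatim library code:

```python
import tensorflow as tf

def get_variables_with_name_sketch(name, train_only=True, printable=False):
    # Illustrative re-statement only: collect variables whose name contains
    # the given substring, optionally printing them as the real helper does.
    t_vars = tf.trainable_variables() if train_only else tf.global_variables()
    d_vars = [var for var in t_vars if name in var.name]
    if printable:
        for idx, v in enumerate(d_vars):
            print(" got {:3}: {} {}".format(idx, v.name, str(v.get_shape())))
    return d_vars

# e.g. dense_vars = get_variables_with_name_sketch('dense', True, True)
```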
@@ -788,7 +790,7 @@ def pretrain(self, sess, x, X_train, X_val, denoise_name=None, n_epoch=100, batc
         # get your own pre-train method.
         #
         # ====================================================
-        print(" [TL] %s start pretrain" % self.name)
+        print(" [*] %s start pretrain" % self.name)
         print(" batch_size: %d" % batch_size)
         if denoise_name:
             print(" denoising layer keep: %f" % self.all_drop[set_keep[denoise_name]])
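For the denoising branch above, `self.all_drop[set_keep[denoise_name]]` appears to be a two-step lookup: `set_keep` maps a layer name to its dropout placeholder, and `all_drop` maps that placeholder to the keep probability fed at run time. A toy illustration under that assumption; the layer name and the 0.5 value are made up:

```python
import tensorflow as tf

# Hypothetical stand-ins for the library's bookkeeping dicts.
keep_prob = tf.placeholder(tf.float32, name='denoising_1_keep')
set_keep = {'denoising_1': keep_prob}   # layer name -> dropout placeholder
all_drop = {keep_prob: 0.5}             # placeholder -> keep probability

denoise_name = 'denoising_1'
if denoise_name:
    print(" denoising layer keep: %f" % all_drop[set_keep[denoise_name]])
```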