
Commit 95d4d93

Fix overly broad exception handling: replace bare except: clauses with except Exception:.
1 parent bd871f2 commit 95d4d93

10 files changed: +20 −20 lines
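
The change is identical in every file: a bare except: inside a TensorFlow version-compatibility fallback is narrowed to except Exception:. A bare except also swallows process-control exceptions such as KeyboardInterrupt and SystemExit (the pylint W0702 warning already noted in nlp.py), while except Exception still catches the AttributeError or similar error raised when the newer API is missing. Below is a minimal, self-contained sketch of the before/after pattern; new_api and old_api are illustrative stand-ins, not names from the codebase.

def new_api(x):  # stand-in for a TF 1.0+ call, e.g. tf.image.per_image_standardization
    raise AttributeError("not available in this TF version")

def old_api(x):  # stand-in for the pre-1.0 fallback, e.g. tf.image.per_image_whitening
    return x

# Before: a bare "except" catches *everything*, including SystemExit and
# KeyboardInterrupt, so Ctrl-C during the compatibility probe is silently
# converted into "use the old API".
try:
    y = new_api(1)
except:  # noqa: E722  (pylint W0702)
    y = old_api(1)

# After (this commit): only Exception subclasses trigger the fallback;
# process-control exceptions propagate normally.
try:
    y = new_api(1)
except Exception:
    y = old_api(1)

print(y)  # -> 1 either way in this toy example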

example/tutorial_cifar10_tfrecord.py

Lines changed: 2 additions & 2 deletions
@@ -121,7 +121,7 @@ def read_and_decode(filename, is_train=None):
         # 5. Subtract off the mean and divide by the variance of the pixels.
         try:  # TF 0.12+
             img = tf.image.per_image_standardization(img)
-        except:  # earlier TF versions
+        except Exception:  # earlier TF versions
             img = tf.image.per_image_whitening(img)

     elif is_train == False:
@@ -130,7 +130,7 @@ def read_and_decode(filename, is_train=None):
         # 2. Subtract off the mean and divide by the variance of the pixels.
         try:  # TF 0.12+
             img = tf.image.per_image_standardization(img)
-        except:  # earlier TF versions
+        except Exception:  # earlier TF versions
             img = tf.image.per_image_whitening(img)
     elif is_train == None:
         img = img

example/tutorial_inceptionV3_tfslim.py

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@ def print_prob(prob):
    exit()
try:  # TF12+
    saver.restore(sess, "./inception_v3.ckpt")
-except:  # TF11
+except Exception:  # TF11
    saver.restore(sess, "inception_v3.ckpt")
print("Model Restored")

tensorlayer/db.py

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ def find_one_params(self, args=None, sort=None):
            params = self.__deserialization(self.paramsfs.get(f_id).read())
            print("[TensorDB] Find one params SUCCESS, {} took: {}s".format(args, round(time.time() - s, 2)))
            return params, f_id
-       except:
+       except Exception:
            return False, False

    @AutoFill

tensorlayer/files.py

Lines changed: 1 addition & 1 deletion
@@ -1180,7 +1180,7 @@ def save_npz(save_list=None, name='model.npz', sess=None):
    try:
        for k, value in enumerate(save_list):
            save_list_var.append(value.eval())
-   except:
+   except Exception:
        logging.info(" Fail to save model, Hint: pass the session into this function, tl.files.save_npz(network.all_params, name='model.npz', sess=sess)")
    np.savez(name, params=save_list_var)
    save_list_var = None

tensorlayer/layers/core.py

Lines changed: 4 additions & 4 deletions
@@ -22,7 +22,7 @@

try:  # For TF12 and later
    TF_GRAPHKEYS_VARIABLES = tf.GraphKeys.GLOBAL_VARIABLES
-except:  # For TF11 and before
+except Exception:  # For TF11 and before
    TF_GRAPHKEYS_VARIABLES = tf.GraphKeys.VARIABLES

@@ -152,7 +152,7 @@ def initialize_rnn_state(state, feed_dict=None):
    """
    try:  # TF1.0
        LSTMStateTuple = tf.contrib.rnn.LSTMStateTuple
-   except:
+   except Exception:
        LSTMStateTuple = tf.nn.rnn_cell.LSTMStateTuple

    if isinstance(state, LSTMStateTuple):
@@ -183,7 +183,7 @@ def print_all_variables(train_only=False):
    else:
        try:  # TF1.0+
            t_vars = tf.global_variables()
-       except:  # TF0.12
+       except Exception:  # TF0.12
            t_vars = tf.all_variables()
        logging.info(" [*] printing global variables")
    for idx, v in enumerate(t_vars):
@@ -998,7 +998,7 @@ def __init__(
        p_hat = tf.reduce_mean(activation_out, 0)  # theano: p_hat = T.mean( self.a[i], axis=0 )
        try:  # TF1.0
            KLD = beta * tf.reduce_sum(rho * tf.log(tf.divide(rho, p_hat)) + (1 - rho) * tf.log((1 - rho) / (tf.subtract(float(1), p_hat))))
-       except:  # TF0.12
+       except Exception:  # TF0.12
            KLD = beta * tf.reduce_sum(rho * tf.log(tf.div(rho, p_hat)) + (1 - rho) * tf.log((1 - rho) / (tf.sub(float(1), p_hat))))
        # KLD = beta * tf.reduce_sum( rho * tf.log(rho/ p_hat) + (1- rho) * tf.log((1- rho)/(1- p_hat)) )
        # theano: L1_a = l1_a[i] * T.sum( rho[i] * T.log(rho[i]/ p_hat) + (1- rho[i]) * T.log((1- rho[i])/(1- p_hat)) )

tensorlayer/layers/extend.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ def __init__(
        with tf.variable_scope(name) as vs:
            try:  # TF12 TF1.0
                self.outputs = tf.expand_dims(self.inputs, axis=axis)
-           except:  # TF11
+           except Exception:  # TF11
                self.outputs = tf.expand_dims(self.inputs, dim=axis)
        self.all_layers = list(layer.all_layers)
        self.all_params = list(layer.all_params)

tensorlayer/layers/flow_control.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def __init__(self, layers, name='mux_layer'):
            self.inputs.append(l.outputs)
        try:  # TF1.0
            all_inputs = tf.stack(self.inputs, name=name)  # pack means concat a list of tensor in a new dim  # 1.2
-       except:
+       except Exception:
            all_inputs = tf.pack(self.inputs, name=name)  # pack means concat a list of tensor in a new dim  # 1.2

        logging.info("MultiplexerLayer %s: n_inputs:%d" % (self.name, self.n_inputs))

tensorlayer/layers/merge.py

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ def __init__(
            self.inputs.append(l.outputs)
        try:  # TF1.0
            self.outputs = tf.concat(self.inputs, concat_dim, name=name)
-       except:  # TF0.12
+       except Exception:  # TF0.12
            self.outputs = tf.concat(concat_dim, self.inputs, name=name)

        logging.info("ConcatLayer %s: axis: %d" % (self.name, concat_dim))

tensorlayer/layers/recurrent.py

Lines changed: 7 additions & 7 deletions
@@ -1268,7 +1268,7 @@ def __init__(
        if 'GRU' in cell_fn.__name__:
            try:
                cell_init_args.pop('state_is_tuple')
-           except:
+           except Exception:
                pass
        self.inputs = layer.outputs

@@ -1278,7 +1278,7 @@ def __init__(
        # Input dimension should be rank 3 [batch_size, n_steps(max), n_features]
        try:
            self.inputs.get_shape().with_rank(3)
-       except:
+       except Exception:
            raise Exception("RNN : Input dimension should be rank 3 : [batch_size, n_steps(max), n_features]")

        # Get the batch_size
@@ -1308,7 +1308,7 @@ def __init__(
                raise Exception("Invalid dropout type (must be a 2-D tuple of " "float)")
            try:
                DropoutWrapper_fn = tf.contrib.rnn.DropoutWrapper
-           except:
+           except Exception:
                DropoutWrapper_fn = tf.nn.rnn_cell.DropoutWrapper

            # cell_instance_fn1=cell_instance_fn # HanSheng
@@ -1337,7 +1337,7 @@ def __init__(
        if sequence_length is None:
            try:  # TF1.0
                sequence_length = retrieve_seq_length_op(self.inputs if isinstance(self.inputs, tf.Tensor) else tf.stack(self.inputs))
-           except:  # TF0.12
+           except Exception:  # TF0.12
                sequence_length = retrieve_seq_length_op(self.inputs if isinstance(self.inputs, tf.Tensor) else tf.pack(self.inputs))

        if n_layer > 1:
@@ -1373,7 +1373,7 @@ def __init__(
        # Manage the outputs
        try:  # TF1.0
            outputs = tf.concat(outputs, 2)
-       except:  # TF0.12
+       except Exception:  # TF0.12
            outputs = tf.concat(2, outputs)
        if return_last:
            # [batch_size, 2 * n_hidden]
@@ -1386,7 +1386,7 @@ def __init__(
                # 2D Tensor [n_example, 2 * n_hidden]
                try:  # TF1.0
                    self.outputs = tf.reshape(tf.concat(outputs, 1), [-1, 2 * n_hidden])
-               except:  # TF0.12
+               except Exception:  # TF0.12
                    self.outputs = tf.reshape(tf.concat(1, outputs), [-1, 2 * n_hidden])
        else:
            # <akara>:
@@ -1546,7 +1546,7 @@ def __init__(
        if 'GRU' in cell_fn.__name__:
            try:
                cell_init_args.pop('state_is_tuple')
-           except:
+           except Exception:
                pass
        # self.inputs = layer.outputs
        logging.info(" [**] Seq2Seq %s: n_hidden:%d cell_fn:%s dropout:%s n_layer:%d" % (self.name, n_hidden, cell_fn.__name__, dropout, n_layer))

tensorlayer/nlp.py

Lines changed: 1 addition & 1 deletion
@@ -1048,7 +1048,7 @@ def moses_multi_bleu(hypotheses, references, lowercase=False):
    try:
        multi_bleu_path, _ = urllib.request.urlretrieve("https://raw.githubusercontent.com/moses-smt/mosesdecoder/" "master/scripts/generic/multi-bleu.perl")
        os.chmod(multi_bleu_path, 0o755)
-   except:  # pylint: disable=W0702
+   except Exception:  # pylint: disable=W0702
        tf.logging.info("Unable to fetch multi-bleu.perl script, using local.")
        metrics_dir = os.path.dirname(os.path.realpath(__file__))
        bin_dir = os.path.abspath(os.path.join(metrics_dir, "..", "..", "bin"))
