
Commit 4303979

luomai authored and zsdonghao committed
Remove the use of global variables. (#388)
* fixed bug of time distributed layer and release 2 tests
* speed up test mnist
* fixed test file name
* remove unused code
1 parent 256fa2c commit 4303979

13 files changed: 87 additions (+), 92 deletions (-)
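The commit drops TensorLayer's global name-reuse state (LayersConfig._name_reuse, LayersConfig._layer_name_list and the set_name_reuse() call) and leaves variable sharing entirely to TensorFlow's variable-scope reuse. Below is a minimal sketch of the pattern the updated examples now follow, adapted from the tutorial_mlp_dropout2.py change further down; the placeholder shape and layer sizes are illustrative, not part of the commit.

import tensorflow as tf
import tensorlayer as tl

def mlp(x, is_train=True, reuse=False):
    # Variable sharing is handled by the TF variable scope alone;
    # the old tl.layers.set_name_reuse(reuse) call is no longer needed.
    with tf.variable_scope("MLP", reuse=reuse):
        network = tl.layers.InputLayer(x, name='input')
        network = tl.layers.DropoutLayer(network, keep=0.8, is_fix=True, is_train=is_train, name='drop1')
        network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')
    return network

x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
net_train = mlp(x, is_train=True, reuse=False)   # creates the variables
net_test = mlp(x, is_train=False, reuse=True)    # shares the same variables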

.travis.yml

Lines changed: 2 additions & 0 deletions
@@ -20,3 +20,5 @@ script:
 - python tests/test_pydocstyle.py
 - python tests/test_mnist_simple.py
 - python tests/test_pooling.py
+- python tests/test_reuse_mlp.py
+- python tests/test_time_distributed.py

example/tutorial_cifar10.py

Lines changed: 0 additions & 2 deletions
@@ -21,7 +21,6 @@ def model(x, y_, reuse):
     W_init2 = tf.truncated_normal_initializer(stddev=0.04)
     b_init2 = tf.constant_initializer(value=0.1)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         net = InputLayer(x, name='input')
         net = Conv2d(net, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', W_init=W_init, name='cnn1')
         # net = Conv2dLayer(net, act=tf.nn.relu, shape=[5, 5, 3, 64],
@@ -69,7 +68,6 @@ def model_batch_norm(x, y_, reuse, is_train):
     W_init2 = tf.truncated_normal_initializer(stddev=0.04)
     b_init2 = tf.constant_initializer(value=0.1)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         net = InputLayer(x, name='input')
 
         net = Conv2d(net, 64, (5, 5), (1, 1), padding='SAME', W_init=W_init, b_init=None, name='cnn1')

example/tutorial_cifar10_tfrecord.py

Lines changed: 1 addition & 2 deletions
@@ -44,6 +44,7 @@
 
 # import numpy as np
 import tensorflow as tf
+
 import tensorlayer as tl
 from tensorlayer.layers import *
 
@@ -185,7 +186,6 @@ def model(x_crop, y_, reuse):
     W_init2 = tf.truncated_normal_initializer(stddev=0.04)
     b_init2 = tf.constant_initializer(value=0.1)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         net = InputLayer(x_crop, name='input')
         net = Conv2d(net, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', W_init=W_init, name='cnn1')
         # net = Conv2dLayer(net, act=tf.nn.relu, shape=[5, 5, 3, 64],
@@ -233,7 +233,6 @@ def model_batch_norm(x_crop, y_, reuse, is_train):
     W_init2 = tf.truncated_normal_initializer(stddev=0.04)
     b_init2 = tf.constant_initializer(value=0.1)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         net = InputLayer(x_crop, name='input')
 
         net = Conv2d(net, 64, (5, 5), (1, 1), padding='SAME', W_init=W_init, b_init=None, name='cnn1')

example/tutorial_generate_text.py

Lines changed: 0 additions & 1 deletion
@@ -231,7 +231,6 @@ def inference(x, is_train, sequence_length, reuse=None):
     print("\nsequence_length: %d, is_train: %s, reuse: %s" % (sequence_length, is_train, reuse))
     rnn_init = tf.random_uniform_initializer(-init_scale, init_scale)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         network = EmbeddingInputlayer(inputs=x, vocabulary_size=vocab_size, embedding_size=hidden_size, E_init=rnn_init, name='embedding')
         network = RNNLayer(
             network,

example/tutorial_mlp_dropout2.py

Lines changed: 0 additions & 1 deletion
@@ -18,7 +18,6 @@
 # define the network
 def mlp(x, is_train=True, reuse=False):
     with tf.variable_scope("MLP", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         network = tl.layers.InputLayer(x, name='input')
         network = tl.layers.DropoutLayer(network, keep=0.8, is_fix=True, is_train=is_train, name='drop1')
         network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')

example/tutorial_mnist_float16.py

Lines changed: 0 additions & 1 deletion
@@ -20,7 +20,6 @@
 
 def model(x, is_train=True, reuse=False):
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         n = InputLayer(x, name='input')
         # cnn
         n = Conv2d(n, 32, (5, 5), (1, 1), padding='SAME', name='cnn1')

example/tutorial_ptb_lstm.py

Lines changed: 0 additions & 1 deletion
@@ -194,7 +194,6 @@ def inference(x, is_training, num_steps, reuse=None):
     print("\nnum_steps : %d, is_training : %s, reuse : %s" % (num_steps, is_training, reuse))
     initializer = tf.random_uniform_initializer(-init_scale, init_scale)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         network = tl.layers.EmbeddingInputlayer(inputs=x, vocabulary_size=vocab_size, embedding_size=hidden_size, E_init=initializer, name='embedding')
         network = tl.layers.DropoutLayer(network, keep=keep_prob, is_fix=True, is_train=is_training, name='drop1')
         network = tl.layers.RNNLayer(

example/tutorial_ptb_lstm_state_is_tuple.py

Lines changed: 0 additions & 1 deletion
@@ -194,7 +194,6 @@ def inference(x, is_training, num_steps, reuse=None):
     print("\nnum_steps : %d, is_training : %s, reuse : %s" % (num_steps, is_training, reuse))
     initializer = tf.random_uniform_initializer(-init_scale, init_scale)
     with tf.variable_scope("model", reuse=reuse):
-        tl.layers.set_name_reuse(reuse)
         network = tl.layers.EmbeddingInputlayer(inputs=x, vocabulary_size=vocab_size, embedding_size=hidden_size, E_init=initializer, name='embedding')
         network = tl.layers.DropoutLayer(network, keep=keep_prob, is_fix=True, is_train=is_training, name='drop1')
         network = tl.layers.RNNLayer(

tensorlayer/layers/core.py

Lines changed: 6 additions & 76 deletions
@@ -4,6 +4,7 @@
 
 import numpy as np
 import tensorflow as tf
+from tensorflow.python.util.deprecation import deprecated
 
 from .. import _logging as logging
 from .. import files, iterate, utils, visualize
@@ -12,8 +13,6 @@
 class LayersConfig:
     tf_dtype = tf.float32  # TensorFlow DType
     set_keep = {}  # A dictionary for holding tf.placeholders
-    _layer_name_list = []  # A list of used layer names
-    _name_reuse = False  # Boolean to indicate if layer names can be reused
 
 
 try:  # For TF12 and later
@@ -60,73 +59,14 @@ def flatten_reshape(variable, name='flatten'):
     return tf.reshape(variable, shape=[-1, dim], name=name)
 
 
+@deprecated("2018-06-30", "TensorLayer relies on TensorFlow to check naming.")
 def clear_layers_name():
-    """Clear all layer names in LayersConfig
-
-    Examples
-    ---------
-    Clean the current graph and try to re-define model.
-
-    >>> for .... (different model settings):
-    >>>     with tf.Graph().as_default() as graph:  # clear all variables of TF
-    >>>         tl.layers.clear_layers_name()       # clear all layer name of TL
-    >>>         sess = tf.InteractiveSession()
-    >>>         # define and train a model here
-    >>>         sess.close()
-
-    Enable reusing layer names.
-
-    >>> net = tl.layers.InputLayer(x, name='input_layer')
-    >>> net = tl.layers.DenseLayer(net, n_units=800, name='relu1')
-    ...
-    >>> tl.layers.clear_layers_name()
-    >>> net2 = tl.layers.InputLayer(x, name='input_layer')
-    >>> net2 = tl.layers.DenseLayer(net2, n_units=800, name='relu1')
-
-    """
-    LayersConfig._layer_name_list = []
+    logging.warning('this method is DEPRECATED and has no effect, please remove it from your code.')
 
 
+@deprecated("2018-06-30", "TensorLayer relies on TensorFlow to check name reusing.")
 def set_name_reuse(enable=True):
-    """Enable or disable reuse layer name.
-
-    By default, each layer must has unique
-    name. When you want two or more input placeholder (inference) share the same
-    model parameters, you need to enable layer name reuse, then allow the
-    parameters have same name scope.
-
-    Parameters
-    ----------
-    enable : boolean
-        Enable or disable name/layer reuse, None means False.
-
-    Examples
-    --------
-    >>> def embed_seq(input_seqs, is_train, reuse):
-    >>>     with tf.variable_scope("model", reuse=reuse):
-    >>>         tl.layers.set_name_reuse(reuse)
-    >>>         net = tl.layers.EmbeddingInputlayer(
-    ...             inputs = input_seqs,
-    ...             vocabulary_size = vocab_size,
-    ...             embedding_size = embedding_size,
-    ...             name = 'e_embedding')
-    >>>         net = tl.layers.DynamicRNNLayer(net,
-    ...             cell_fn = tf.contrib.rnn.BasicLSTMCell,
-    ...             n_hidden = embedding_size,
-    ...             dropout = (0.7 if is_train else None),
-    ...             initializer = w_init,
-    ...             sequence_length = tl.layers.retrieve_seq_length_op2(input_seqs),
-    ...             return_last = True,
-    ...             name = 'e_dynamicrnn')
-    >>>     return net
-    >>>
-    >>> net_train = embed_seq(t_caption, is_train=True, reuse=False)
-    >>> net_test = embed_seq(t_caption, is_train=False, reuse=True)
-
-    - see ``tutorial_ptb_lstm.py`` for example.
-
-    """
-    LayersConfig._name_reuse = enable
+    logging.warning('this method is DEPRECATED and has no effect, please remove it from your code.')
 
 
 def initialize_rnn_state(state, feed_dict=None):
@@ -383,15 +323,7 @@ def __init__(self, inputs=None, name='layer'):
         scope_name = tf.get_variable_scope().name
         if scope_name:
             name = scope_name + '/' + name
-        if (name in LayersConfig._layer_name_list) and LayersConfig._name_reuse is False:
-            raise Exception("Layer '%s' already exists, please choice other 'name' or reuse this layer\
-            \nHint : Use different name for different 'Layer' (The name is used to control parameter sharing)\
-            \nAdditional Informations: http://tensorlayer.readthedocs.io/en/latest/modules/layers.html?highlight=clear_layers_name#tensorlayer.layers.clear_layers_name"
-            % name)
-        else:
-            self.name = name
-            if name not in ['', None, False]:
-                LayersConfig._layer_name_list.append(name)
+        self.name = name
 
     def print_params(self, details=True, session=None):
         """Print all info of parameters in the network"""
@@ -435,9 +367,7 @@ def __str__(self):
         return " Last layer is: %s (%s) %s" % (self.__class__.__name__, self.name, self.outputs.get_shape().as_list())
 
     def __getitem__(self, key):
-        set_name_reuse(True)
         net_new = Layer(self.inputs, name=self.name)
-        set_name_reuse(LayersConfig._name_reuse)  # set back
         net_new.outputs = self.outputs[key]
 
         net_new.all_layers = list(self.all_layers[:-1])
tensorlayer/layers/time_distribution.py

Lines changed: 2 additions & 2 deletions
@@ -63,9 +63,9 @@ def __init__(
         timestep = input_shape[1]
         x = tf.unstack(self.inputs, axis=1)
 
+        is_name_reuse = tf.get_variable_scope().reuse
         for i in range(0, timestep):
-            with tf.variable_scope(name, reuse=(LayersConfig._name_reuse if i == 0 else True)) as vs:
-                set_name_reuse((LayersConfig._name_reuse if i == 0 else True))
+            with tf.variable_scope(name, reuse=(is_name_reuse if i == 0 else True)) as vs:
                 net = layer_class(InputLayer(x[i], name=args['name'] + str(i)), **args)
                 x[i] = net.outputs
                 variables = tf.get_collection(TF_GRAPHKEYS_VARIABLES, scope=vs.name)
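TimeDistributedLayer now reads the reuse state of the enclosing variable scope via tf.get_variable_scope().reuse instead of the removed LayersConfig._name_reuse flag; it applies that state to the first timestep, and later timesteps always reuse, so every step shares one set of weights. A hedged usage sketch, assuming the TensorLayer 1.x constructor arguments (layer_class, args) referenced in the diff above; the shapes and names are illustrative:

import tensorflow as tf
import tensorlayer as tl

batch_size, timestep, input_dim = 32, 20, 100
x = tf.placeholder(tf.float32, shape=[batch_size, timestep, input_dim], name='encode_seqs')

net = tl.layers.InputLayer(x, name='input')
# Apply the same DenseLayer (shared weights) to every timestep.
net = tl.layers.TimeDistributedLayer(
    net,
    layer_class=tl.layers.DenseLayer,
    args={'n_units': 50, 'name': 'dense'},
    name='time_dense')
print(net.outputs.get_shape())  # expected: (32, 20, 50)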

0 commit comments
