@@ -4,6 +4,7 @@
 
 import numpy as np
 import tensorflow as tf
+from tensorflow.python.util.deprecation import deprecated
 
 from .. import _logging as logging
 from .. import files, iterate, utils, visualize
@@ -12,8 +13,6 @@
 class LayersConfig:
     tf_dtype = tf.float32 # TensorFlow DType
     set_keep = {} # A dictionary for holding tf.placeholders
-    _layer_name_list = [] # A list of used layer names
-    _name_reuse = False # Boolean to indicate if layer names can be reused
 
 
 try: # For TF12 and later
@@ -60,73 +59,14 @@ def flatten_reshape(variable, name='flatten'):
     return tf.reshape(variable, shape=[-1, dim], name=name)
 
 
+@deprecated("2018-06-30", "TensorLayer relies on TensorFlow to check naming.")
 def clear_layers_name():
-    """Clear all layer names in LayersConfig
-
-    Examples
-    ---------
-    Clean the current graph and try to re-define model.
-
-    >>> for .... (different model settings):
-    >>>     with tf.Graph().as_default() as graph: # clear all variables of TF
-    >>>         tl.layers.clear_layers_name() # clear all layer name of TL
-    >>>         sess = tf.InteractiveSession()
-    >>>         # define and train a model here
-    >>>         sess.close()
-
-    Enable reusing layer names.
-
-    >>> net = tl.layers.InputLayer(x, name='input_layer')
-    >>> net = tl.layers.DenseLayer(net, n_units=800, name='relu1')
-    ...
-    >>> tl.layers.clear_layers_name()
-    >>> net2 = tl.layers.InputLayer(x, name='input_layer')
-    >>> net2 = tl.layers.DenseLayer(net2, n_units=800, name='relu1')
-
-    """
-    LayersConfig._layer_name_list = []
+    logging.warning('this method is DEPRECATED and has no effect, please remove it from your code.')
 
 
+@deprecated("2018-06-30", "TensorLayer relies on TensorFlow to check name reusing.")
 def set_name_reuse(enable=True):
-    """Enable or disable reuse layer name.
-
-    By default, each layer must has unique
-    name. When you want two or more input placeholder (inference) share the same
-    model parameters, you need to enable layer name reuse, then allow the
-    parameters have same name scope.
-
-    Parameters
-    ----------
-    enable : boolean
-        Enable or disable name/layer reuse, None means False.
-
-    Examples
-    --------
-    >>> def embed_seq(input_seqs, is_train, reuse):
-    >>>     with tf.variable_scope("model", reuse=reuse):
-    >>>         tl.layers.set_name_reuse(reuse)
-    >>>         net = tl.layers.EmbeddingInputlayer(
-    ...             inputs = input_seqs,
-    ...             vocabulary_size = vocab_size,
-    ...             embedding_size = embedding_size,
-    ...             name = 'e_embedding')
-    >>>         net = tl.layers.DynamicRNNLayer(net,
-    ...             cell_fn = tf.contrib.rnn.BasicLSTMCell,
-    ...             n_hidden = embedding_size,
-    ...             dropout = (0.7 if is_train else None),
-    ...             initializer = w_init,
-    ...             sequence_length = tl.layers.retrieve_seq_length_op2(input_seqs),
-    ...             return_last = True,
-    ...             name = 'e_dynamicrnn')
-    >>>     return net
-    >>>
-    >>> net_train = embed_seq(t_caption, is_train=True, reuse=False)
-    >>> net_test = embed_seq(t_caption, is_train=False, reuse=True)
-
-    - see ``tutorial_ptb_lstm.py`` for example.
-
-    """
-    LayersConfig._name_reuse = enable
+    logging.warning('this method is DEPRECATED and has no effect, please remove it from your code.')
 
 
 def initialize_rnn_state(state, feed_dict=None):
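
Note: both helpers are kept as no-op stubs behind TensorFlow's `deprecated` decorator, which logs a one-time warning carrying the removal date and the migration hint. For callers, the migration is to delete the `set_name_reuse`/`clear_layers_name` calls and let `tf.variable_scope(..., reuse=...)` govern parameter sharing on its own. A minimal sketch, adapted from the deleted docstring example (the placeholder shape, vocabulary size, and embedding size are illustrative, not from this commit):

```python
import tensorflow as tf
import tensorlayer as tl

t_caption = tf.placeholder(tf.int64, shape=[None, None])  # token ids (assumed shape)

def embed_seq(input_seqs, reuse):
    # tf.variable_scope alone now controls parameter sharing; the old
    # tl.layers.set_name_reuse(reuse) call inside the scope is simply deleted.
    with tf.variable_scope("model", reuse=reuse):
        net = tl.layers.EmbeddingInputlayer(
            inputs=input_seqs,
            vocabulary_size=10000,  # illustrative value
            embedding_size=256,     # illustrative value
            name='e_embedding')
    return net

net_train = embed_seq(t_caption, reuse=False)  # creates model/e_embedding/*
net_test = embed_seq(t_caption, reuse=True)    # reuses the same variables
```
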
@@ -383,15 +323,7 @@ def __init__(self, inputs=None, name='layer'):
         scope_name = tf.get_variable_scope().name
         if scope_name:
             name = scope_name + '/' + name
-        if (name in LayersConfig._layer_name_list) and LayersConfig._name_reuse is False:
-            raise Exception("Layer '%s' already exists, please choice other 'name' or reuse this layer\
-                \nHint : Use different name for different 'Layer' (The name is used to control parameter sharing)\
-                \nAdditional Informations: http://tensorlayer.readthedocs.io/en/latest/modules/layers.html?highlight=clear_layers_name#tensorlayer.layers.clear_layers_name"
-                % name)
-        else:
-            self.name = name
-            if name not in ['', None, False]:
-                LayersConfig._layer_name_list.append(name)
+        self.name = name
 
     def print_params(self, details=True, session=None):
         """Print all info of parameters in the network"""
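
With the TensorLayer-side name registry gone, duplicate detection is delegated to TensorFlow itself: `tf.get_variable` already raises a `ValueError` when a variable of the same name is created in a scope that has not enabled reuse. A minimal sketch of the behavior this change relies on:

```python
import tensorflow as tf

with tf.variable_scope("dense1"):
    w = tf.get_variable("W", shape=[784, 800])  # creates dense1/W

with tf.variable_scope("dense1"):  # same scope, reuse not enabled
    # Raises ValueError: Variable dense1/W already exists, disallowed.
    w2 = tf.get_variable("W", shape=[784, 800])
```
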
@@ -435,9 +367,7 @@ def __str__(self):
         return " Last layer is: %s (%s) %s" % (self.__class__.__name__, self.name, self.outputs.get_shape().as_list())
 
     def __getitem__(self, key):
-        set_name_reuse(True)
         net_new = Layer(self.inputs, name=self.name)
-        set_name_reuse(LayersConfig._name_reuse) # set back
         net_new.outputs = self.outputs[key]
 
         net_new.all_layers = list(self.all_layers[:-1])
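
Accordingly, `__getitem__` no longer toggles the reuse flag around rebuilding the layer, since `Layer.__init__` now accepts an existing name unconditionally. A short usage sketch (layer names and shapes are illustrative):

```python
import tensorflow as tf
import tensorlayer as tl

x = tf.placeholder(tf.float32, shape=[None, 784])
net = tl.layers.InputLayer(x, name='input')
net = tl.layers.DenseLayer(net, n_units=800, name='dense')

# Indexing builds a new Layer under the same name and slices its outputs;
# no set_name_reuse(True) / set-back dance is needed anymore.
first_half = net[:, :400]
```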