Skip to content

Commit 8bd7eb7

Browse files
committed
[layers] noise layer with is_train
1 parent d4bd923 commit 8bd7eb7

File tree

1 file changed

+48
-26
lines changed

1 file changed

+48
-26
lines changed

tensorlayer/layers.py

Lines changed: 48 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -828,6 +828,8 @@ class DropoutLayer(Layer):
828828
The keeping probability, the lower more values will be set to zero.
829829
is_fix : boolean
830830
Default False, if True, the keeping probability is fixed and cannot be changed via feed_dict.
831+
is_train : boolean
832+
If False, skip this layer; default is True.
831833
name : a string or None
832834
An optional name to attach to this layer.
833835
@@ -863,26 +865,34 @@ def __init__(
863865
layer = None,
864866
keep = 0.5,
865867
is_fix = False,
868+
is_train = True,
866869
name = 'dropout_layer',
867870
):
868871
Layer.__init__(self, name=name)
869-
self.inputs = layer.outputs
870-
print(" tensorlayer:Instantiate DropoutLayer %s: keep: %f is_fix: %s" % (self.name, keep, is_fix))
871-
872-
# The name of placeholder for keep_prob is the same with the name
873-
# of the Layer.
874-
if is_fix:
875-
self.outputs = tf.nn.dropout(self.inputs, keep, name=name)
872+
if is_train is False:
873+
print(" tensorlayer:skip DropoutLayer")
874+
self.outputs = layer.outputs
875+
self.all_layers = list(layer.all_layers)
876+
self.all_params = list(layer.all_params)
877+
self.all_drop = dict(layer.all_drop)
876878
else:
877-
set_keep[name] = tf.placeholder(tf.float32)
878-
self.outputs = tf.nn.dropout(self.inputs, set_keep[name], name=name) # 1.2
879+
self.inputs = layer.outputs
880+
print(" tensorlayer:Instantiate DropoutLayer %s: keep: %f is_fix: %s" % (self.name, keep, is_fix))
879881

880-
self.all_layers = list(layer.all_layers)
881-
self.all_params = list(layer.all_params)
882-
self.all_drop = dict(layer.all_drop)
883-
if is_fix is False:
884-
self.all_drop.update( {set_keep[name]: keep} )
885-
self.all_layers.extend( [self.outputs] )
882+
# The name of placeholder for keep_prob is the same with the name
883+
# of the Layer.
884+
if is_fix:
885+
self.outputs = tf.nn.dropout(self.inputs, keep, name=name)
886+
else:
887+
set_keep[name] = tf.placeholder(tf.float32)
888+
self.outputs = tf.nn.dropout(self.inputs, set_keep[name], name=name) # 1.2
889+
890+
self.all_layers = list(layer.all_layers)
891+
self.all_params = list(layer.all_params)
892+
self.all_drop = dict(layer.all_drop)
893+
if is_fix is False:
894+
self.all_drop.update( {set_keep[name]: keep} )
895+
self.all_layers.extend( [self.outputs] )
886896

887897
# print(set_keep[name])
888898
# Tensor("Placeholder_2:0", dtype=float32)
@@ -910,26 +920,38 @@ class GaussianNoiseLayer(Layer):
910920
------------
911921
layer : a :class:`Layer` instance
912922
The `Layer` class feeding into this layer.
913-
sigma : float
914-
Scale value of Gaussian noise.
923+
mean : float
924+
stddev : float
925+
is_train : boolean
926+
If False, skip this layer; default is True.
915927
name : a string or None
916928
An optional name to attach to this layer.
917929
"""
918930
def __init__(
919931
self,
920932
layer = None,
921-
sigma = 0.1,
933+
mean = 0.0,
934+
stddev = 1.0,
935+
is_train = True,
922936
name = 'gaussian_noise_layer',
923937
):
924938
Layer.__init__(self, name=name)
925-
self.inputs = layer.outputs
926-
print(" tensorlayer:Instantiate GaussianNoiseLayer %s: keep: %f" % (self.name, keep))
927-
with tf.variable_scope(name) as vs:
928-
noise = np.random.normal(0.0 , sigma , tf.to_int64(input_layer).get_shape())
929-
self.inputs = self.inputs + noise
930-
self.all_layers = list(layer.all_layers)
931-
self.all_params = list(layer.all_params)
932-
self.all_drop = dict(layer.all_drop)
939+
if is_train is False:
940+
print(" tensorlayer:skip GaussianNoiseLayer")
941+
self.outputs = layer.outputs
942+
self.all_layers = list(layer.all_layers)
943+
self.all_params = list(layer.all_params)
944+
self.all_drop = dict(layer.all_drop)
945+
else:
946+
self.inputs = layer.outputs
947+
print(" tensorlayer:Instantiate GaussianNoiseLayer %s: mean: %f stddev: %f" % (self.name, mean, stddev))
948+
with tf.variable_scope(name) as vs:
949+
# noise = np.random.normal(0.0 , sigma , tf.to_int64(self.inputs).get_shape())
950+
noise = tf.random_normal(shape = self.inputs.get_shape(), mean=mean, stddev=stddev)
951+
self.outputs = self.inputs + noise
952+
self.all_layers = list(layer.all_layers)
953+
self.all_params = list(layer.all_params)
954+
self.all_drop = dict(layer.all_drop)
933955

934956

935957
class DropconnectDenseLayer(Layer):

0 commit comments

Comments
 (0)