
Commit 239ae9d

scope name - lrelu
1 parent: c60797d


1 file changed: +3 -3 lines


tensorlayer/activation.py

Lines changed: 3 additions & 3 deletions
@@ -40,7 +40,7 @@ def ramp(x=None, v_min=0, v_max=1, name=None):
     """
     return tf.clip_by_value(x, clip_value_min=v_min, clip_value_max=v_max, name=name)
 
-def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
+def leaky_relu(x=None, alpha=0.1, name="lrelu"):
     """The LeakyReLU, Shortcut is ``lrelu``.
 
     Modified version of ReLU, introducing a nonzero gradient for negative
@@ -63,11 +63,11 @@ def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
     ------------
     - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`_
     """
-    with tf.name_scope(name) as scope:
+    # with tf.name_scope(name) as scope:
         # x = tf.nn.relu(x)
         # m_x = tf.nn.relu(-x)
         # x -= alpha * m_x
-        x = tf.maximum(x, alpha * x)
+    x = tf.maximum(x, alpha * x, name=name)
     return x
 
 #Shortcut
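
For context, the change here is about where the TensorFlow name ends up. Wrapping the op in tf.name_scope(name) names the enclosing scope, so the output tensor comes out as something like "LeakyReLU/Maximum:0"; passing name= directly to tf.maximum names the op itself, giving "lrelu:0". A minimal sketch of the post-commit behavior, assuming the TF1-style graph API this file targets; the placeholder shape and the prints are illustrative assumptions, not part of the commit:

import tensorflow as tf

def leaky_relu(x, alpha=0.1, name="lrelu"):
    # Post-commit version: the name is attached to the maximum op itself,
    # so the returned tensor is "<name>:0" rather than "<scope>/Maximum:0"
    # as with the old tf.name_scope wrapper.
    return tf.maximum(x, alpha * x, name=name)

x = tf.placeholder(tf.float32, [None, 64], name="x")  # illustrative input
y = leaky_relu(x)
print(y.name)  # "lrelu:0" after this commit; "LeakyReLU/Maximum:0" before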
