
Commit 567b711

remove kwargs of brelu, leaky_relu and soft_relu
1 parent 2582fec commit 567b711

3 files changed: +74 −6 lines

paddle/fluid/API.spec

Lines changed: 3 additions & 3 deletions
@@ -162,6 +162,9 @@ paddle.fluid.layers.log ArgSpec(args=['x', 'name'], varargs=None, keywords=None,
 paddle.fluid.layers.crop ArgSpec(args=['x', 'shape', 'offsets', 'name'], varargs=None, keywords=None, defaults=(None, None, None))
 paddle.fluid.layers.rank_loss ArgSpec(args=['label', 'left', 'right', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
+paddle.fluid.layers.brelu ArgSpec(args=['x', 't_min', 't_max', 'name'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.layers.leaky_relu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.layers.soft_relu ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.layers.flatten ArgSpec(args=['x', 'axis', 'name'], varargs=None, keywords=None, defaults=(1, None))
 paddle.fluid.layers.sequence_mask ArgSpec(args=['x', 'maxlen', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 'int64', None))
 paddle.fluid.layers.stack ArgSpec(args=['x', 'axis'], varargs=None, keywords=None, defaults=(0,))
@@ -258,9 +261,6 @@ paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', de
 paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.softshrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.brelu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.leaky_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.soft_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.elu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.relu6 ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.pow ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
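Each API.spec line records the public signature as reported by Python's inspect module, which is why the old kwargs-based ops appeared as args=[], varargs='args', keywords='kwargs'. A minimal sketch of how such a line can be reproduced (the actual spec generator is not part of this commit, so treat the snippet as illustrative):

    import inspect
    import paddle.fluid as fluid

    # With the new explicit wrapper, the argspec lists the real parameter names
    # instead of the generic *args/**kwargs of the auto-generated op.
    spec = inspect.getargspec(fluid.layers.brelu)
    print('paddle.fluid.layers.brelu ' + str(spec))
    # ArgSpec(args=['x', 't_min', 't_max', 'name'], varargs=None, keywords=None, ...)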

python/paddle/fluid/layers/nn.py

Lines changed: 71 additions & 0 deletions
@@ -108,6 +108,9 @@
     'crop',
     'rank_loss',
     'prelu',
+    'brelu',
+    'leaky_relu',
+    'soft_relu',
     'flatten',
     'sequence_mask',
     'stack',
@@ -5948,6 +5951,74 @@ def prelu(x, mode, param_attr=None, name=None):
     return out
 
 
+@templatedoc()
+def brelu(x, t_min=0.0, t_max=24.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        t_min(${t_min_type}|0.0): ${t_min_comment}
+        t_max(${t_max_type}|24.0): ${t_max_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('brelu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='brelu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'t_min': t_min,
+               't_max': t_max})
+    return out
+
+
+@templatedoc()
+def leaky_relu(x, alpha=0.02, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        alpha(${alpha_type}|0.02): ${alpha_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('leaky_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='leaky_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'alpha': alpha})
+    return out
+
+
+@templatedoc()
+def soft_relu(x, threshold=40.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        threshold(${threshold_type}|40.0): ${threshold_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('soft_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='soft_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'threshold': threshold})
+    return out
+
+
 def flatten(x, axis=1, name=None):
     """
     **Flatten layer**
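
Because the three activations are now plain Python functions with named parameters, they can be called directly with keyword arguments. A small usage sketch (the input variable and the chosen values are illustrative, not part of the commit):

    import paddle.fluid as fluid

    # Any float tensor works as input; this data layer is only for illustration.
    x = fluid.layers.data(name='x', shape=[32], dtype='float32')

    # Explicit keyword arguments replace the old **kwargs-style calls.
    y1 = fluid.layers.brelu(x, t_min=1.0, t_max=20.0)
    y2 = fluid.layers.leaky_relu(x, alpha=0.1)
    y3 = fluid.layers.soft_relu(x, threshold=30.0)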

python/paddle/fluid/layers/ops.py

Lines changed: 0 additions & 3 deletions
@@ -17,9 +17,6 @@
 
 __activations__ = [
     'softshrink',
-    'brelu',
-    'leaky_relu',
-    'soft_relu',
     'elu',
     'relu6',
     'pow',
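
Names listed in __activations__ are bound to auto-generated layer functions whose only Python signature is *args/**kwargs; dropping brelu, leaky_relu and soft_relu from the list leaves the hand-written wrappers in nn.py as their sole definitions. A rough sketch of that generation pattern, assuming the generate_layer_fn helper used elsewhere in fluid (the rest of ops.py is not shown in this diff):

    # Assumed sketch of how ops.py turns op names into Python layer functions;
    # only the removal of the three names is part of this commit.
    from .layer_function_generator import generate_layer_fn

    __activations__ = ['softshrink', 'elu', 'relu6', 'pow']  # brelu & co. removed

    for _OP in __activations__:
        # Each generated function accepts *args/**kwargs, hence the old
        # ArgSpec(args=[], varargs='args', keywords='kwargs') entries.
        globals()[_OP] = generate_layer_fn(_OP)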
