Skip to content

Commit 6537b17

Browse files
authored
Merge pull request #13522 from wzzju/remove-kwargs
remove the kwargs of elu, relu6, pow, stanh, hard_sigmoid and swish
2 parents f855c05 + 2b2337e commit 6537b17

File tree

3 files changed

+154
-12
lines changed

3 files changed

+154
-12
lines changed

paddle/fluid/API.spec

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,12 @@ paddle.fluid.layers.relu ArgSpec(args=['x', 'name'], varargs=None, keywords=None
160160
paddle.fluid.layers.log ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
161161
paddle.fluid.layers.crop ArgSpec(args=['x', 'shape', 'offsets', 'name'], varargs=None, keywords=None, defaults=(None, None, None))
162162
paddle.fluid.layers.rank_loss ArgSpec(args=['label', 'left', 'right', 'name'], varargs=None, keywords=None, defaults=(None,))
163+
paddle.fluid.layers.elu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
164+
paddle.fluid.layers.relu6 ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=(6.0, None))
165+
paddle.fluid.layers.pow ArgSpec(args=['x', 'factor', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
166+
paddle.fluid.layers.stanh ArgSpec(args=['x', 'scale_a', 'scale_b', 'name'], varargs=None, keywords=None, defaults=(0.6666666666666666, 1.7159, None))
167+
paddle.fluid.layers.hard_sigmoid ArgSpec(args=['x', 'slope', 'offset', 'name'], varargs=None, keywords=None, defaults=(0.2, 0.5, None))
168+
paddle.fluid.layers.swish ArgSpec(args=['x', 'beta', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
163169
paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
164170
paddle.fluid.layers.flatten ArgSpec(args=['x', 'axis', 'name'], varargs=None, keywords=None, defaults=(1, None))
165171
paddle.fluid.layers.sequence_mask ArgSpec(args=['x', 'maxlen', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 'int64', None))
@@ -276,12 +282,6 @@ paddle.fluid.layers.softsign ArgSpec(args=[], varargs='args', keywords='kwargs',
276282
paddle.fluid.layers.brelu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
277283
paddle.fluid.layers.leaky_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
278284
paddle.fluid.layers.soft_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
279-
paddle.fluid.layers.elu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
280-
paddle.fluid.layers.relu6 ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
281-
paddle.fluid.layers.pow ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
282-
paddle.fluid.layers.stanh ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
283-
paddle.fluid.layers.hard_sigmoid ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
284-
paddle.fluid.layers.swish ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
285285
paddle.fluid.layers.uniform_random ArgSpec(args=['shape', 'dtype', 'min', 'max', 'seed'], varargs=None, keywords=None, defaults=(None, None, None, None))
286286
paddle.fluid.layers.hard_shrink ArgSpec(args=['x', 'threshold'], varargs=None, keywords=None, defaults=(None,))
287287
paddle.fluid.layers.cumsum ArgSpec(args=['x', 'axis', 'exclusive', 'reverse'], varargs=None, keywords=None, defaults=(None, None, None))

python/paddle/fluid/layers/nn.py

Lines changed: 148 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,12 @@
107107
'log',
108108
'crop',
109109
'rank_loss',
110+
'elu',
111+
'relu6',
112+
'pow',
113+
'stanh',
114+
'hard_sigmoid',
115+
'swish',
110116
'prelu',
111117
'flatten',
112118
'sequence_mask',
@@ -5895,6 +5901,148 @@ def pad2d(input,
58955901
return out
58965902

58975903

5904+
@templatedoc()
def elu(x, alpha=1.0, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        alpha(${alpha_type}|1.0): ${alpha_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/alpha/name via locals(), so it must be created
    # before any additional local variable is introduced.
    helper = LayerHelper('elu', **locals())
    result = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='elu',
        inputs={'X': x},
        outputs={'Out': result},
        attrs={'alpha': alpha})
    return result
5925+
5926+
5927+
@templatedoc()
def relu6(x, threshold=6.0, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        threshold(${threshold_type}|6.0): ${threshold_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/threshold/name via locals(), so it must be
    # created before any additional local variable is introduced.
    helper = LayerHelper('relu6', **locals())
    result = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='relu6',
        inputs={'X': x},
        outputs={'Out': result},
        attrs={'threshold': threshold})
    return result
5948+
5949+
5950+
@templatedoc()
def pow(x, factor=1.0, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        factor(${factor_type}|1.0): ${factor_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/factor/name via locals(), so it must be created
    # before any additional local variable is introduced.
    helper = LayerHelper('pow', **locals())
    result = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='pow',
        inputs={'X': x},
        outputs={'Out': result},
        attrs={'factor': factor})
    return result
5971+
5972+
5973+
@templatedoc()
def stanh(x, scale_a=2.0 / 3.0, scale_b=1.7159, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        scale_a(${scale_a_type}|2.0 / 3.0): ${scale_a_comment}
        scale_b(${scale_b_type}|1.7159): ${scale_b_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/scale_a/scale_b/name via locals(), so it must be
    # created before any additional local variable is introduced.
    helper = LayerHelper('stanh', **locals())
    result = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='stanh',
        inputs={'X': x},
        outputs={'Out': result},
        attrs={'scale_a': scale_a,
               'scale_b': scale_b})
    return result
5996+
5997+
5998+
@templatedoc()
def hard_sigmoid(x, slope=0.2, offset=0.5, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        slope(${slope_type}|0.2): ${slope_comment}
        offset(${offset_type}|0.5): ${offset_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/slope/offset/name via locals(), so it must be
    # created before any additional local variable is introduced.
    helper = LayerHelper('hard_sigmoid', **locals())
    result = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='hard_sigmoid',
        inputs={'X': x},
        outputs={'Out': result},
        attrs={'slope': slope,
               'offset': offset})
    return result
6021+
6022+
6023+
@templatedoc()
def swish(x, beta=1.0, name=None):
    """
    ${comment}

    Args:
        x(${x_type}): ${x_comment}
        beta(${beta_type}|1.0): ${beta_comment}
        name(str|None): A name for this layer(optional). If set None, the layer
                        will be named automatically.

    Returns:
        output(${out_type}): ${out_comment}
    """
    # LayerHelper consumes x/beta/name via locals(); keep this call first.
    helper = LayerHelper('swish', **locals())
    out = helper.create_tmp_variable(dtype=x.dtype)
    helper.append_op(
        type='swish',
        inputs={'X': x},
        outputs={'Out': out},
        # BUG FIX: the attribute must be passed under its declared name
        # 'beta' (matching this wrapper's parameter and the API spec entry),
        # not 'slope' — the swish operator has no 'slope' attribute, so the
        # user-supplied beta value was never reaching the op.
        attrs={'beta': beta})
    return out
6044+
6045+
58986046
def prelu(x, mode, param_attr=None, name=None):
58996047
"""
59006048
Equation:

python/paddle/fluid/layers/ops.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -36,12 +36,6 @@
3636
'brelu',
3737
'leaky_relu',
3838
'soft_relu',
39-
'elu',
40-
'relu6',
41-
'pow',
42-
'stanh',
43-
'hard_sigmoid',
44-
'swish',
4539
]
4640

4741
__all__ = [

0 commit comments

Comments
 (0)