
Commit a54c423

Merge pull request #13512 from tensor-tang/remove/kwargs
remove kwargs of some activation api
2 parents cdf3a4c + 21f35ee
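In effect, the activation layers touched here stop advertising opaque `*args`/`**kwargs` and gain explicit `(x, name=None)`-style signatures (plus real attribute defaults for brelu, leaky_relu, and soft_relu). A minimal before/after sketch, assuming a fluid program context of this era (variable names are illustrative):

    import paddle.fluid.layers as layers

    x = layers.data(name="x", shape=[16], dtype="float32")
    # Before this PR, the generated function only documented *args/**kwargs.
    # After it, the signature is explicit and shows up in API.spec below:
    out = layers.tanh(x, name='tanh')               # tanh(x, name=None)
    out2 = layers.brelu(x, t_min=0.0, t_max=24.0)   # new explicit attr defaults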

File tree: 6 files changed (+279, -28 lines)

paddle/fluid/API.spec

Lines changed: 19 additions & 19 deletions
@@ -167,6 +167,9 @@ paddle.fluid.layers.stanh ArgSpec(args=['x', 'scale_a', 'scale_b', 'name'], vara
 paddle.fluid.layers.hard_sigmoid ArgSpec(args=['x', 'slope', 'offset', 'name'], varargs=None, keywords=None, defaults=(0.2, 0.5, None))
 paddle.fluid.layers.swish ArgSpec(args=['x', 'beta', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
 paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
+paddle.fluid.layers.brelu ArgSpec(args=['x', 't_min', 't_max', 'name'], varargs=None, keywords=None, defaults=(0.0, 24.0, None))
+paddle.fluid.layers.leaky_relu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=(0.02, None))
+paddle.fluid.layers.soft_relu ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=(40.0, None))
 paddle.fluid.layers.flatten ArgSpec(args=['x', 'axis', 'name'], varargs=None, keywords=None, defaults=(1, None))
 paddle.fluid.layers.sequence_mask ArgSpec(args=['x', 'maxlen', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 'int64', None))
 paddle.fluid.layers.stack ArgSpec(args=['x', 'axis'], varargs=None, keywords=None, defaults=(0,))
@@ -262,26 +265,23 @@ paddle.fluid.layers.sum ArgSpec(args=[], varargs='args', keywords='kwargs', defa
 paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sigmoid ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.logsigmoid ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.exp ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.tanh ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.tanh_shrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.softshrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sqrt ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.abs ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.ceil ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.floor ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.cos ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sin ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.round ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.reciprocal ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.square ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.softplus ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.softsign ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.brelu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.leaky_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.soft_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
+paddle.fluid.layers.sigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.logsigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.exp ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.tanh ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.tanh_shrink ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.sqrt ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.abs ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.ceil ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.floor ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.cos ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.sin ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.round ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.reciprocal ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.square ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.softplus ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.softsign ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.uniform_random ArgSpec(args=['shape', 'dtype', 'min', 'max', 'seed'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.layers.hard_shrink ArgSpec(args=['x', 'threshold'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.cumsum ArgSpec(args=['x', 'axis', 'exclusive', 'reverse'], varargs=None, keywords=None, defaults=(None, None, None))

python/paddle/fluid/clip.py

Lines changed: 1 addition & 1 deletion
@@ -280,7 +280,7 @@ def _create_operators(self, param, grad):
         group_scale_name = self.group_name + "_scale"
         if group_scale_name not in self.context:
             group_norm_var = layers.sums(input=self.context[self.group_name])
-            layers.sqrt(x=group_norm_var, out=group_norm_var)
+            group_norm_var = layers.sqrt(x=group_norm_var)
             clip_var = self.context[self.group_name + "_clip"]
             group_scale_var = layers.elementwise_div(
                 x=clip_var,
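This one-line fix follows from the signature change: the regenerated `sqrt` no longer takes an `out` keyword to write in place, so the caller must rebind the returned temporary. The two calling styles, side by side:

    # Removed kwargs path: result written into a caller-supplied variable.
    #   layers.sqrt(x=group_norm_var, out=group_norm_var)
    # New explicit path: the generated function allocates and returns a
    # fresh temporary variable.
    group_norm_var = layers.sqrt(x=group_norm_var)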

python/paddle/fluid/layers/layer_function_generator.py

Lines changed: 27 additions & 1 deletion
@@ -23,7 +23,10 @@
 from ..framework import OpProtoHolder, Variable
 from ..layer_helper import LayerHelper
 
-__all__ = ['deprecated', 'generate_layer_fn', 'autodoc', 'templatedoc']
+__all__ = [
+    'deprecated', 'generate_layer_fn', 'generate_layer_fn_noattr', 'autodoc',
+    'templatedoc'
+]
 
 
 def _convert_(name):
@@ -205,6 +208,29 @@ def func(*args, **kwargs):
     return func
 
 
+def generate_layer_fn_noattr(op_type):
+    """Register the Python layer for an Operator without Attribute.
+
+    Args:
+       op_type: The name of the operator to be created.
+
+    This function takes in the operator type (sigmoid, exp , tanh etc) and
+    creates the operator functionality.
+
+    """
+    op_proto = OpProtoHolder.instance().get_op_proto(op_type)
+
+    def func(x, name=None):
+        helper = LayerHelper(op_type, **locals())
+        output = helper.create_tmp_variable(dtype=x.dtype)
+        helper.append_op(type=op_type, inputs={"X": x}, outputs={"Out": output})
+        return output
+
+    func.__name__ = op_type
+    func.__doc__ = _generate_doc_string_(op_proto)
+    return func
+
+
 def deprecated(func_or_class):
     """
     Deprecated warning decorator. It will result a warning message.
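A brief usage sketch of the new generator (it mirrors the registration loop added in ops.py below); the produced function behaves like a handwritten layer:

    from paddle.fluid.layers.layer_function_generator import generate_layer_fn_noattr

    # Produces a function with signature func(x, name=None) that appends one
    # 'exp' op wiring input "X" to output "Out" in the current program.
    exp = generate_layer_fn_noattr('exp')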

python/paddle/fluid/layers/nn.py

Lines changed: 71 additions & 0 deletions
@@ -114,6 +114,9 @@
     'hard_sigmoid',
     'swish',
     'prelu',
+    'brelu',
+    'leaky_relu',
+    'soft_relu',
     'flatten',
     'sequence_mask',
     'stack',
@@ -6096,6 +6099,74 @@ def prelu(x, mode, param_attr=None, name=None):
     return out
 
 
+@templatedoc()
+def brelu(x, t_min=0.0, t_max=24.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        t_min(${t_min_type}|0.0): ${t_min_comment}
+        t_max(${t_max_type}|24.0): ${t_max_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('brelu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='brelu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'t_min': t_min,
+               't_max': t_max})
+    return out
+
+
+@templatedoc()
+def leaky_relu(x, alpha=0.02, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        alpha(${alpha_type}|0.02): ${alpha_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('leaky_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='leaky_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'alpha': alpha})
+    return out
+
+
+@templatedoc()
+def soft_relu(x, threshold=40.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        threshold(${threshold_type}|40.0): ${threshold_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('soft_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='soft_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'threshold': threshold})
+    return out
+
+
 def flatten(x, axis=1, name=None):
     """
     **Flatten layer**
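A usage sketch of the three new explicit-signature layers, mirroring the unit tests added in test_layers.py below (shapes and attribute values are illustrative):

    import paddle.fluid.layers as layers
    from paddle.fluid.framework import Program, program_guard

    prog = Program()
    with program_guard(prog):
        x = layers.data(name="x", shape=[16], dtype="float32")
        y1 = layers.brelu(x, t_min=1.0, t_max=20.0)  # bounded relu: min(max(x, t_min), t_max)
        y2 = layers.leaky_relu(x, alpha=0.1)         # max(x, alpha * x)
        y3 = layers.soft_relu(x, threshold=30.0)     # softplus of x clipped to [-threshold, threshold]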

python/paddle/fluid/layers/ops.py

Lines changed: 9 additions & 7 deletions
@@ -13,15 +13,14 @@
 # limitations under the License.
 
 from __future__ import print_function
-from .layer_function_generator import generate_layer_fn
+from .layer_function_generator import generate_layer_fn, generate_layer_fn_noattr
 
-__activations__ = [
+__activations_noattr__ = [
     'sigmoid',
     'logsigmoid',
     'exp',
     'tanh',
     'tanh_shrink',
-    'softshrink',
     'sqrt',
     'abs',
     'ceil',
@@ -33,9 +32,6 @@
     'square',
     'softplus',
     'softsign',
-    'brelu',
-    'leaky_relu',
-    'soft_relu',
 ]
 
 __all__ = [
@@ -64,11 +60,17 @@
     'slice',
     'shape',
     'maxout',
-] + __activations__
+    'softshrink',
+]
 
 for _OP in set(__all__):
     globals()[_OP] = generate_layer_fn(_OP)
 
+__all__ += __activations_noattr__
+
+for _OP in set(__activations_noattr__):
+    globals()[_OP] = generate_layer_fn_noattr(_OP)
+
 __all__ += ["uniform_random"]
 
 _uniform_random_ = generate_layer_fn('uniform_random')
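Note the split: `softshrink` moves into `__all__` and keeps the generic `generate_layer_fn` path, presumably because it carries an operator attribute (its shrinkage threshold), while the attribute-free activations route through the new generator. In effect:

    # Hedged sketch of the two registration paths after this change:
    from paddle.fluid.layers.layer_function_generator import (
        generate_layer_fn, generate_layer_fn_noattr)

    softshrink = generate_layer_fn('softshrink')   # still accepts op attributes via kwargs
    sigmoid = generate_layer_fn_noattr('sigmoid')  # explicit (x, name=None) signature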

python/paddle/fluid/tests/unittests/test_layers.py

Lines changed: 152 additions & 0 deletions
@@ -573,6 +573,158 @@ def test_prelu(self):
             self.assertIsNotNone(out)
         print(str(program))
 
+    def test_brelu(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.brelu(input, t_min=1.0, t_max=20.0, name='brelu')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_leaky_relu(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.leaky_relu(input, alpha=0.1, name='leaky_relu')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_soft_relu(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.soft_relu(input, threshold=30.0, name='soft_relu')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_sigmoid(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.sigmoid(input, name='sigmoid')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_logsigmoid(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.logsigmoid(input, name='logsigmoid')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_exp(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.exp(input, name='exp')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_tanh(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.tanh(input, name='tanh')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_tanh_shrink(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.tanh_shrink(input, name='tanh_shrink')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_sqrt(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.sqrt(input, name='sqrt')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_abs(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.abs(input, name='abs')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_ceil(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.ceil(input, name='ceil')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_floor(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.floor(input, name='floor')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_cos(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.cos(input, name='cos')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_sin(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.sin(input, name='sin')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_round(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.round(input, name='round')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_reciprocal(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.reciprocal(input, name='reciprocal')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_square(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.square(input, name='square')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_softplus(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.softplus(input, name='softplus')
+            self.assertIsNotNone(out)
+        print(str(program))
+
+    def test_softsign(self):
+        program = Program()
+        with program_guard(program):
+            input = layers.data(name="input", shape=[16], dtype="float32")
+            out = layers.softsign(input, name='softsign')
+            self.assertIsNotNone(out)
+        print(str(program))
+
     def test_roi_perspective_transform(self):
         program = Program()
         with program_guard(program):
