@@ -6904,13 +6904,13 @@ def prelu(x, mode, param_attr=None, name=None):
 
     Args:
         x (Variable): The input tensor.
-        param_attr(ParamAttr|None): The parameter attribute for the learnable
-            weight (alpha).
+        param_attr(ParamAttr|None): The parameter attribute for the learnable
+            weight (alpha).
         mode (string): The mode for weight sharing. It supports all, channel
            and element. all: all elements share same weight
            channel:elements in a channel share same weight
            element:each element has a weight
-        name(str|None): A name for this layer(optional). If set None, the layer
+        name(str|None): A name for this layer(optional). If set None, the layer
            will be named automatically.
 
     Returns:
@@ -6920,9 +6920,9 @@ def prelu(x, mode, param_attr=None, name=None):
 
         .. code-block:: python
 
-            x = fluid.layers.data(name="x", shape=[10,10], dtype="float32")
-            mode = 'channel'
-            output = fluid.layers.prelu(x,mode)
+            x = fluid.layers.data(name="x", shape=[10,10], dtype="float32")
+            mode = 'channel'
+            output = fluid.layers.prelu(x,mode)
 
     """
     helper = LayerHelper('prelu', **locals())
     if mode not in ['all', 'channel', 'element']:
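The docstring touched by this diff documents the three weight-sharing modes and the optional param_attr. As a minimal usage sketch under the legacy paddle.fluid 1.x API shown above (the parameter name 'alpha_w' and the constant initializer are illustrative assumptions, not taken from the diff):

    import paddle.fluid as fluid

    # Input placeholder; fluid.layers.data prepends a batch dimension.
    x = fluid.layers.data(name="x", shape=[10, 10], dtype="float32")

    # mode='channel' learns one alpha per channel; 'all' shares a single
    # alpha across the whole tensor; 'element' learns one alpha per element.
    output = fluid.layers.prelu(
        x,
        mode='channel',
        param_attr=fluid.ParamAttr(
            name='alpha_w',  # hypothetical parameter name
            initializer=fluid.initializer.Constant(0.25)))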