@@ -6972,18 +6972,18 @@ def prelu(x, mode, param_attr=None, name=None):
     """
     Equation:
 
-        y = \max(0, x) + alpha \min(0, x)
+        y = \max(0, x) + alpha * \min(0, x)
 
     Args:
         x (Variable): The input tensor.
-        param_attr(ParamAttr|None): The parameter attribute for the learnable
-          weight (alpha).
-        mode (string): The mode for weight sharing
-          all: all elements share same weight
-          channel:elements in a channel share same weight
-          element:each element has a weight
-        name(str|None): A name for this layer(optional). If set None, the layer
-          will be named automatically.
+        param_attr(ParamAttr|None): The parameter attribute for the learnable
+            weight (alpha).
+        mode (string): The mode for weight sharing. It supports all, channel
+            and element. all: all elements share the same weight,
+            channel: elements in a channel share the same weight,
+            element: each element has its own weight.
+        name(str|None): A name for this layer (optional). If set None, the layer
+            will be named automatically.
 
     Returns:
         Variable: The output tensor with the same shape as input.
@@ -6992,7 +6992,7 @@ def prelu(x, mode, param_attr=None, name=None):
 
     .. code-block:: python
 
-            x = fluid.layers.data(name="x", shape=[10,10], dtype="float32")
+         x = fluid.layers.data(name="x", shape=[10,10], dtype="float32")
          mode = 'channel'
          output = fluid.layers.prelu(x,mode)
    """