
Commit 055df47

Polish code
1 parent cbc1b7f commit 055df47

3 files changed: +30 −10 lines changed


paddle/fluid/operators/activation_op.cc

Lines changed: 5 additions & 4 deletions
@@ -275,7 +275,7 @@ class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
               "The value of threshold for HardShrink. [default: 0.5]")
          .SetDefault(0.5f);
     AddComment(R"DOC(
-** HardShrink activation operator **
+:strong:`HardShrink activation operator`
 
 .. math::
     out = \begin{cases}
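
For reference, HardShrink keeps values whose magnitude exceeds the threshold and zeroes the rest. A minimal NumPy sketch of that rule (the helper name hard_shrink_ref is illustrative only; the 0.5 default mirrors the SetDefault(0.5f) attribute above):

    import numpy as np

    def hard_shrink_ref(x, threshold=0.5):
        # Keep x where |x| > threshold, zero it elsewhere.
        x = np.asarray(x, dtype=np.float32)
        return np.where(np.abs(x) > threshold, x, 0.0)

    print(hard_shrink_ref([-1.0, -0.3, 0.2, 0.9]))  # -> [-1.   0.   0.   0.9]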
@@ -394,15 +394,16 @@ class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
   void Make() override {
     AddInput("X", "Input of ThresholdedRelu operator");
     AddOutput("Out", "Output of ThresholdedRelu operator");
-    AddAttr<float>("threshold", "The threshold location of activation")
+    AddAttr<float>("threshold",
+                   "The threshold location of activation. [default 1.0].")
         .SetDefault(1.0f);
     AddComment(R"DOC(
-ThresholdedRelu Activation Operator.
+:strong:`ThresholdedRelu activation operator`
 
 .. math::
 
     out = \begin{cases}
-    x, \text{if } x > threshold \\
+    x, \text{if } x > threshold \\
     0, \text{otherwise}
     \end{cases}
 )DOC");

paddle/fluid/operators/row_conv_op.cc

Lines changed: 1 addition & 1 deletion
@@ -94,7 +94,7 @@ class RowConvOpMaker : public framework::OpProtoAndCheckerMaker {
              "in this LodTensor is a matrix with shape T x N, i.e., the "
              "same shape as X.");
     AddComment(R"DOC(
-** Row-convolution operator **
+:strong:`Row-convolution operator`
 
 The row convolution is called lookahead convolution. This operator was
 introduced in the following paper for DeepSpeech2:
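
Row convolution (lookahead convolution) mixes each timestep with a fixed window of future timesteps using one weight per (offset, channel) pair. A minimal NumPy sketch of that idea, under the assumption of zero padding past the sequence end; the function name row_conv_ref and the shapes are illustrative, not taken from this diff:

    import numpy as np

    def row_conv_ref(x, w):
        # x: (T, N) input sequence; w: (context, N) per-channel lookahead weights.
        # out[t, d] = sum_i w[i, d] * x[t + i, d], with zeros past the end.
        T, N = x.shape
        context = w.shape[0]
        padded = np.vstack([x, np.zeros((context - 1, N), dtype=x.dtype)])
        out = np.zeros_like(x)
        for i in range(context):
            out += w[i] * padded[i:i + T]
        return out

    x = np.random.rand(5, 3).astype("float32")
    w = np.random.rand(2, 3).astype("float32")
    print(row_conv_ref(x, w).shape)  # -> (5, 3), same shape as x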

python/paddle/fluid/layers/ops.py

Lines changed: 24 additions & 5 deletions
@@ -40,7 +40,6 @@
     'relu6',
     'pow',
     'stanh',
-    'thresholded_relu',
     'hard_sigmoid',
     'swish',
 ]
@@ -91,8 +90,7 @@ def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
     return _uniform_random_(**kwargs)
 
 
-uniform_random.__doc__ = _uniform_random_.__doc__ + "\n" \
-    + """
+uniform_random.__doc__ = _uniform_random_.__doc__ + """
 Examples:
 
     >>> result = fluid.layers.uniform_random(shape=[32, 784])
@@ -112,8 +110,7 @@ def hard_shrink(x, threshold=None):
     return _hard_shrink_(**kwargs)
 
 
-hard_shrink.__doc__ = _hard_shrink_.__doc__ + "\n" \
-    + """
+hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
 Examples:
 
     >>> data = fluid.layers.data(name="input", shape=[784])
@@ -141,3 +138,25 @@ def cumsum(x, axis=None, exclusive=None, reverse=None):
     >>> data = fluid.layers.data(name="input", shape=[32, 784])
     >>> result = fluid.layers.cumsum(data, axis=0)
 """
+
+__all__ += ['thresholded_relu']
+
+_thresholded_relu_ = generate_layer_fn('thresholded_relu')
+
+
+def thresholded_relu(x, threshold=None):
+    kwargs = dict()
+    for name in locals():
+        val = locals()[name]
+        if val is not None:
+            kwargs[name] = val
+
+    return _thresholded_relu_(**kwargs)
+
+
+thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
+Examples:
+
+    >>> data = fluid.layers.data(name="input", shape=[1])
+    >>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
+"""
