@@ -37,7 +37,7 @@ using paddle::framework::Tensor;
37
37
" (bool, default false) Set to true for inference only, false " \
38
38
" for training. Some layers may run faster when this is true." ) \
39
39
.SetDefault (false ); \
40
- AddComment (# OP_COMMENT); \
40
+ AddComment (OP_COMMENT); \
41
41
} \
42
42
}
43
43
@@ -124,7 +124,7 @@ class ActivationOpGrad : public framework::OperatorWithKernel {
124
124
UNUSED constexpr char SigmoidDoc[] = R"DOC(
125
125
Sigmoid Activation Operator
126
126
127
- $$out = \frac{1}{1 + e^{-x}}$$
127
+ $$out = \\frac{1}{1 + e^{-x}}$$
128
128
129
129
)DOC" ;
130
130
@@ -187,14 +187,14 @@ Abs Activation Operator.
187
187
UNUSED constexpr char CeilDoc[] = R"DOC(
188
188
Ceil Activation Operator.
189
189
190
- $out = ceil(x) $
190
+ $out = \left \lceil x \right \rceil $
191
191
192
192
)DOC" ;
193
193
194
194
UNUSED constexpr char FloorDoc[] = R"DOC(
195
195
Floor Activation Operator.
196
196
197
- $out = floor(x) $
197
+ $out = \left \lfloor x \right \rfloor $
198
198
199
199
)DOC" ;
200
200
@@ -252,7 +252,7 @@ Softplus Activation Operator.
252
252
UNUSED constexpr char SoftsignDoc[] = R"DOC(
253
253
Softsign Activation Operator.
254
254
255
- $$out = \frac{x}{1 + |x |}$$
255
+ $$out = \\frac{x}{1 + \|x\|}$$
256
256
257
257
)DOC" ;
258
258
0 commit comments