
Commit dffa8fa
add softsign activation.
1 parent: c6ec26d

2 files changed: 46 additions, 2 deletions

paddle/gserver/activations/ActivationFunction.cpp

Lines changed: 31 additions & 0 deletions
@@ -212,6 +212,37 @@ Error __must_check backward(Argument& act) {
 }
 END_DEFINE_ACTIVATION(sequence_softmax)
 
+/*
+ * @brief SoftSign Activation.
+ * \f[
+ *    f(z) = \frac{z}{1 + |z|}
+ * \f]
+ */
+BEGIN_DEFINE_ACTIVATION(softsign)
+private:
+MatrixPtr denominator_;
+
+Error __must_check forward(Argument& act) {
+  size_t height = act.value->getHeight();
+  size_t width = act.value->getWidth();
+  Matrix::resizeOrCreate(
+      denominator_, height, width, false, useGpu(act.deviceId));
+  denominator_->assign(*act.value);
+  denominator_->abs2();
+  denominator_->add(1.);
+
+  act.value->dotDiv(*act.value, *denominator_);
+  return Error();
+}
+
+Error __must_check backward(Argument& act) {
+  denominator_->square2();
+  denominator_->scalarDiv(*denominator_, 1.);
+  act.grad->dotMul(*act.grad, *denominator_);
+  return Error();
+}
+END_DEFINE_ACTIVATION(softsign)
+
 /**
  * @brief Relu Activation.
  * forward. y = max(0, z)
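Note on the backward pass: after forward(), denominator_ still holds 1 + |z|, so square2() turns it into (1 + |z|)^2 and scalarDiv(*denominator_, 1.) into 1 / (1 + |z|)^2, which is exactly the softsign derivative, since d/dz [z / (1 + |z|)] = 1 / (1 + |z|)^2. A minimal NumPy sketch of the same arithmetic (hypothetical helper names, not part of this commit):

    import numpy as np

    def softsign_forward(z):
        # mirrors denominator_ in the C++ code: abs2() then add(1.)
        denom = np.abs(z) + 1.0
        # mirrors dotDiv: y = z / (1 + |z|)
        return z / denom, denom

    def softsign_backward(grad, denom):
        # mirrors square2() then scalarDiv(., 1.): grad * 1 / (1 + |z|)^2
        return grad * (1.0 / denom ** 2)

    z = np.array([-2.0, 0.0, 3.0])
    y, denom = softsign_forward(z)
    dz = softsign_backward(np.ones_like(z), denom)  # [1/9, 1, 1/16]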

python/paddle/trainer_config_helpers/activations.py

Lines changed: 15 additions & 2 deletions
@@ -17,7 +17,8 @@
     "IdentityActivation", "LinearActivation", 'SequenceSoftmaxActivation',
     'ExpActivation', "ReluActivation", "BReluActivation", "SoftReluActivation",
     "STanhActivation", "AbsActivation", "SquareActivation", "BaseActivation",
-    "LogActivation", "SqrtActivation", "ReciprocalActivation"
+    "LogActivation", "SqrtActivation", "ReciprocalActivation",
+    "SoftSignActivation"
 ]
@@ -243,8 +244,20 @@ class ReciprocalActivation(BaseActivation):
     Reciprocal Activation.
 
     .. math::
-       f(z) = 1/z
+       f(z)=\\frac{1}{z}
     """
 
     def __init__(self):
         BaseActivation.__init__(self, 'reciprocal', False)
+
+
+class SoftSignActivation(BaseActivation):
+    """
+    SoftSign Activation.
+
+    .. math::
+       f(z)=\\frac{z}{1 + |z|}
+    """
+
+    def __init__(self):
+        BaseActivation.__init__(self, 'softsign', False)
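Once exported, SoftSignActivation can be passed wherever a layer expects an activation object. A usage sketch assuming the usual v1 trainer_config_helpers layer API (the data_layer/fc_layer usage is illustrative and not part of this commit):

    from paddle.trainer_config_helpers import data_layer, fc_layer
    from paddle.trainer_config_helpers.activations import SoftSignActivation

    # a fully connected layer whose output is passed through softsign
    data = data_layer(name='input', size=64)
    hidden = fc_layer(input=data, size=128, act=SoftSignActivation())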
