Commit 66be6fe

Author: qijun (committed)

add some source code comments
ISSUE=4592951 git-svn-id: https://svn.baidu.com/idl/trunk/paddle@1447 1ad973e4-5ce8-4261-8a94-b56d1f490c56
1 parent ff496cd commit 66be6fe

File tree

15 files changed: +377 -142 lines changed
Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 Activations
 =============
 
-.. doxygenfile:: paddle/gserver/activations/ActivationFunction.h
-.. doxygenfile:: paddle/gserver/activations/ActivationFunction.cpp
+.. doxygenclass:: paddle::ActivationFunction
+   :members:

paddle/gserver/activations/ActivationFunction.cpp

Lines changed: 47 additions & 21 deletions
@@ -28,16 +28,28 @@ limitations under the License. */
 namespace paddle {
 
 static ClassRegistrar<ActivationFunction> gActivationRegistrar;
+/**
+ * @def ACTIVATION_CLASS_NAME
+ * @brief Macro for getting derived activation class name
+ * @note ACTIVATION_CLASS_NAME(softmax) softmax_;
+ * means softmaxActivation softmax_;
+ */
 #define ACTIVATION_CLASS_NAME(ACTIVATION_NAME) ACTIVATION_NAME##Activation
-
+/**
+ * @def BEGIN_DEFINE_ACTIVATION
+ * @brief Macro for defining a derived activation class
+ */
 #define BEGIN_DEFINE_ACTIVATION(ACTIVATION_NAME)                             \
   class ACTIVATION_CLASS_NAME(ACTIVATION_NAME) : public ActivationFunction { \
   private:                                                                   \
     static const std::string name;                                           \
                                                                              \
   public:                                                                    \
     const std::string& getName() const { return name; }
-
+/**
+ * @def END_DEFINE_ACTIVATION
+ * @brief Macro for registering a derived activation class
+ */
 #define END_DEFINE_ACTIVATION(ACTIVATION_NAME)                    \
   };                                                              \
   const std::string ACTIVATION_CLASS_NAME(ACTIVATION_NAME)::name = \
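For orientation, a rough sketch of what the macro pair expands to, using the sigmoid activation defined later in this file as the example; it is reconstructed only from the macro bodies visible in this hunk (the tail of END_DEFINE_ACTIVATION is cut off at the hunk boundary and is only summarized in a comment):

    // Illustrative expansion sketch of:
    //   BEGIN_DEFINE_ACTIVATION(sigmoid)
    //   void forward(Argument& act) { act.value->sigmoid(*act.value); }
    //   void backward(Argument& act) { act.grad->sigmoidDerivative(*act.value); }
    //   END_DEFINE_ACTIVATION(sigmoid)
    class sigmoidActivation : public ActivationFunction {  // ACTIVATION_CLASS_NAME(sigmoid)
    private:
      static const std::string name;

    public:
      const std::string& getName() const { return name; }
      // body written between the two macros:
      void forward(Argument& act) { act.value->sigmoid(*act.value); }
      void backward(Argument& act) { act.grad->sigmoidDerivative(*act.value); }
    };
    // END_DEFINE_ACTIVATION(sigmoid) also defines sigmoidActivation::name and,
    // per its @brief above, registers the derived class; that part of the macro
    // is not shown in this hunk.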
@@ -66,18 +78,21 @@ static InitFunction __reg_activation__identity([] {
 });
 
 /**
- * SigmoidActivation
- *
+ * @brief Sigmoid Activation
+ * \f[
  * f(z) = \frac{1}{1+exp(-z)}
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(sigmoid)
 void forward(Argument& act) { act.value->sigmoid(*act.value); }
 void backward(Argument& act) { act.grad->sigmoidDerivative(*act.value); }
 END_DEFINE_ACTIVATION(sigmoid)
 
 /**
- * Do Softmax activation for all sample.
+ * @brief Softmax Activation
+ * \f[
  * P(y=j|x) = \frac{e^{x^Tw_j}}{\sum^K_{k=1}e^{x^Tw_k}}
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(softmax)
 private:
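As an aside on why the sigmoid backward() above only needs act.value: the sigmoid derivative can be written purely in terms of the output y = f(x), a standard identity that is not part of this diff,

    \f[
    f'(z) = f(z)\,(1 - f(z))
    \quad\Rightarrow\quad
    \frac{dE}{dx} = \frac{dE}{dy} \cdot y \, (1 - y)
    \f]

which is presumably what sigmoidDerivative applies elementwise to act.grad (an assumption about that helper, not something shown in this commit).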
@@ -115,8 +130,12 @@ void backward(Argument& act) {
 }
 END_DEFINE_ACTIVATION(softmax)
 
-/// Softmax on all frames of one sequence.
-/// Width of frame must be one.
+
+/**
+ * @brief Sequence_softmax Activation
+ * @note Softmax on all frames of one sequence.
+ * Width of frame must be one.
+ */
 BEGIN_DEFINE_ACTIVATION(sequence_softmax)
 private:
 ACTIVATION_CLASS_NAME(softmax) softmax_;
@@ -156,8 +175,7 @@ void backward(Argument& act) {
 END_DEFINE_ACTIVATION(sequence_softmax)
 
 /**
- * Relu Activation.
- *
+ * @brief Relu Activation.
  * forward. y = max(0, z)
  *
  * derivative of relu is:
@@ -173,7 +191,7 @@ void backward(Argument& act) { act.grad->reluDerivative(*act.value); }
 END_DEFINE_ACTIVATION(relu)
 
 /**
- * BRelu Activation.
+ * @brief BRelu Activation.
  *
  * forward. y = min(24, max(0, z))
  *
@@ -192,9 +210,10 @@ void backward(Argument& act) { act.grad->breluDerivative(*act.value); }
 END_DEFINE_ACTIVATION(brelu)
 
 /**
- * tanh activation.
- *
+ * @brief Tanh Activation.
+ * \f[
  * f(z) = tanh(z)=\frac{e^z-e^{-z}}{e^z+e^{-z}}
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(tanh)
 void forward(Argument& act) { act.value->tanh(*act.value); }
@@ -203,9 +222,10 @@ void backward(Argument& act) { act.grad->tanhDerivative(*act.value); }
 END_DEFINE_ACTIVATION(tanh)
 
 /**
- * Scaled Tanh Activation
- *
+ * @brief Scaled Tanh Activation
+ * \f[
  * f(z) = 1.7159 * tanh(2/3*z)
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(stanh)
 private:
@@ -221,9 +241,10 @@ void backward(Argument& act) {
 END_DEFINE_ACTIVATION(stanh)
 
 /**
- * Soft relu activation.
- *
+ * @brief Soft Relu Activation.
+ * \f[
  * f(z) = ln(1+e^z)
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(softrelu)
 void forward(Argument& act) { act.value->softrelu(*act.value); }
@@ -232,8 +253,7 @@ void backward(Argument& act) { act.grad->softreluDerivative(*act.value); }
 END_DEFINE_ACTIVATION(softrelu)
 
 /**
- * Abs Activation.
- *
+ * @brief Abs Activation.
  * Forward: f(z) = abs(z)
  *
  * Derivative:
@@ -258,9 +278,10 @@ void backward(Argument& act) { act.grad->absDerivative(*act.in); }
 END_DEFINE_ACTIVATION(abs)
 
 /**
- * Square Activation.
- *
+ * @brief Square Activation.
+ * \f[
  * f(z) = z^2.
+ * \f]
  */
 BEGIN_DEFINE_ACTIVATION(square)
 void forward(Argument& act) {
@@ -274,7 +295,12 @@ void forward(Argument& act) {
 
 void backward(Argument& act) { act.grad->squareDerivative(*act.in); }
 END_DEFINE_ACTIVATION(square)
-
+/**
+ * @brief Exponential Activation.
+ * \f[
+ * f(z) = e^z
+ * \f]
+ */
 BEGIN_DEFINE_ACTIVATION(exponential)
 void forward(Argument& act) { act.value->exp(*act.value); }
 
paddle/gserver/activations/ActivationFunction.h

Lines changed: 28 additions & 8 deletions
@@ -17,7 +17,18 @@ limitations under the License. */
 #include <string>
 
 namespace paddle {
+
 struct Argument;
+/**
+ * @brief Activation function is a function that transforms a set of input
+ * signals into an output signal. The purpose of the activation function
+ * is to introduce non-linearity into the network.
+ *
+ * @note Common activation functions are provided, including linear,
+ * sigmoid, softmax, sequence_softmax, relu, brelu, tanh, stanh,
+ * softrelu, abs, square, exponential.
+ *
+ */
 class ActivationFunction {
 public:
   static ActivationFunction* create(const std::string& type);
@@ -26,16 +37,25 @@ class ActivationFunction {
 
   virtual ~ActivationFunction() {}
 
-  // act.value <- f(act.value),
-  // where f is the activation function.
-  // Suppose that before calling forward(), act.value is x and
-  // after forward() is called, act.value is y, then y = f(x),
-  // Usually, act is Layer::output_
+  /**
+   * @brief Forward propagation
+   *
+   * act.value <- f(act.value),
+   * where f is the activation function.
+   * Suppose that before calling forward(), act.value is x and
+   * after forward() is called, act.value is y, then y = f(x).
+   *
+   * Usually, act is Layer::output_
+   */
   virtual void forward(Argument& act) = 0;
 
-  // x and y are defined in the above comment for forward().
-  // Before calling backward(), act.grad = dE / dy, where E is the error/cost.
-  // After backward() returns, act.grad = dE / dx = (dE/dy) * (dy/dx)
+  /**
+   * @brief Backward propagation
+   *
+   * x and y are defined in the above comment for forward().
+   * - Before calling backward(), act.grad = dE / dy, where E is the error/cost
+   * - After backward() returns, act.grad = dE / dx = (dE/dy) * (dy/dx)
+   */
   virtual void backward(Argument& act) = 0;
 
   virtual const std::string& getName() const = 0;
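Putting the forward()/backward() contract above together, a minimal caller-side sketch against this interface; the variable layerOutput and the ownership/cleanup convention are illustrative assumptions, not part of this header:

    // Hypothetical usage sketch (layerOutput is an Argument, typically
    // Layer::output_ per the comment above):
    ActivationFunction* act = ActivationFunction::create("sigmoid");
    act->forward(layerOutput);   // layerOutput.value: x -> y = f(x), in place
    act->backward(layerOutput);  // layerOutput.grad:  dE/dy -> dE/dx = (dE/dy) * (dy/dx)
    delete act;                  // cleanup convention assumed for this sketch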
