@@ -32,25 +32,25 @@ namespace detail {
32
32
namespace forward {
33
33
34
34
template <typename T>
35
- DEVICE T linear (const T a) {
35
+ DEVICE T Identity (const T a) {
36
36
return a;
37
37
}
38
38
39
39
template <typename T>
40
- DEVICE T relu (const T a) {
40
+ DEVICE T Relu (const T a) {
41
41
return a > static_cast <T>(0.0 ) ? a : static_cast <T>(0.0 );
42
42
}
43
43
44
44
template <typename T>
45
- DEVICE T sigmoid (const T a) {
45
+ DEVICE T Sigmoid (const T a) {
46
46
const T min = SIGMOID_THRESHOLD_MIN;
47
47
const T max = SIGMOID_THRESHOLD_MAX;
48
48
T tmp = (a < min) ? min : ((a > max) ? max : a);
49
49
return static_cast <T>(1.0 ) / (static_cast <T>(1.0 ) + exp (-tmp));
50
50
}
51
51
52
52
template <typename T>
53
- DEVICE T tanh (const T a) {
53
+ DEVICE T Tanh (const T a) {
54
54
T tmp = -2.0 * a;
55
55
tmp = (tmp > EXP_MAX_INPUT) ? EXP_MAX_INPUT : tmp;
56
56
return (2.0 / (1.0 + exp (tmp))) - 1.0 ;
@@ -61,22 +61,22 @@ DEVICE T tanh(const T a) {
61
61
namespace backward {
62
62
63
63
template <typename T>
64
- DEVICE T linear (const T a, const T b) {
64
+ DEVICE T Identity (const T a, const T b) {
65
65
return a;
66
66
}
67
67
68
68
template <typename T>
69
- DEVICE T relu (const T a, const T b) {
69
+ DEVICE T Relu (const T a, const T b) {
70
70
return a * (b > 0.0 ? 1.0 : 0.0 );
71
71
}
72
72
73
73
template <typename T>
74
- DEVICE T sigmoid (const T a, const T b) {
74
+ DEVICE T Sigmoid (const T a, const T b) {
75
75
return a * b * (1.0 - b);
76
76
}
77
77
78
78
template <typename T>
79
- DEVICE T tanh (const T a, const T b) {
79
+ DEVICE T Tanh (const T a, const T b) {
80
80
return a * (1.0 - b * b);
81
81
}
82
82
@@ -89,20 +89,20 @@ struct Active {
89
89
};
90
90
91
91
// Forward activation dispatch table for float. Entry order — sigmoid,
// relu, tanh, identity — must match the integer index passed to
// forward::activation(); keep every kAct* table in the same order.
static DEVICE Active<float>::Act kActFloat[] = {
    &forward::Sigmoid<float>, &forward::Relu<float>, &forward::Tanh<float>,
    &forward::Identity<float>};
94
94
95
95
// Backward (gradient) dispatch table for float; same index order as
// kActFloat (sigmoid, relu, tanh, identity).
static DEVICE Active<float>::ActGrad kActGradFloat[] = {
    &backward::Sigmoid<float>, &backward::Relu<float>, &backward::Tanh<float>,
    &backward::Identity<float>};
98
98
99
99
// Forward activation dispatch table for double; same index order as
// kActFloat (sigmoid, relu, tanh, identity).
static DEVICE Active<double>::Act kActDouble[] = {
    &forward::Sigmoid<double>, &forward::Relu<double>, &forward::Tanh<double>,
    &forward::Identity<double>};
102
102
103
103
// Backward (gradient) dispatch table for double; same index order as
// kActFloat (sigmoid, relu, tanh, identity).
static DEVICE Active<double>::ActGrad kActGradDouble[] = {
    &backward::Sigmoid<double>, &backward::Relu<double>,
    &backward::Tanh<double>, &backward::Identity<double>};
106
106
107
107
namespace forward {
108
108
inline DEVICE float activation (float a, int index) {
@@ -128,29 +128,29 @@ inline DEVICE double activation(double a, double b, int index) {
128
128
#ifdef __AVX__
129
129
// AVX (8-lane float, __m256) forward activation kernels. Declarations
// only — definitions live in the corresponding implementation file.
namespace forward {
namespace avx {
__m256 Relu(const __m256 a);
__m256 Sigmoid(const __m256 a);
__m256 Tanh(const __m256 a);
__m256 Identity(const __m256 a);
}  // namespace avx
}  // namespace forward
137
137
138
138
// AVX (8-lane float, __m256) backward activation kernels: `a` is the
// incoming gradient, `b` the forward output — mirroring the scalar
// backward:: overloads. Declarations only; definitions live in the
// corresponding implementation file.
namespace backward {
namespace avx {
__m256 Relu(const __m256 a, const __m256 b);
__m256 Sigmoid(const __m256 a, const __m256 b);
__m256 Tanh(const __m256 a, const __m256 b);
__m256 Identity(const __m256 a, const __m256 b);
}  // namespace avx
}  // namespace backward
146
146
147
147
// AVX forward dispatch table; same index order as the scalar kAct*
// tables (sigmoid, relu, tanh, identity).
static Active<__m256>::Act kActAvx[] = {
    &forward::avx::Sigmoid, &forward::avx::Relu, &forward::avx::Tanh,
    &forward::avx::Identity};
150
150
151
151
// AVX backward dispatch table; same index order as the scalar kAct*
// tables (sigmoid, relu, tanh, identity).
static Active<__m256>::ActGrad kActGradAvx[] = {
    &backward::avx::Sigmoid, &backward::avx::Relu, &backward::avx::Tanh,
    &backward::avx::Identity};
154
154
155
155
namespace forward {
156
156
// Dispatch an AVX forward activation by integer index (see kActAvx for
// the index → function ordering). No bounds check — callers must pass a
// valid table index.
inline __m256 activation(__m256 a, int index) { return kActAvx[index](a); }
0 commit comments