-macro onlyreal(ex)
-  @capture(ex, (f_(x, a__) = body_) | (function f_(x, a__) body_ end)) ||
-    error("expected a function with initial argument `x`")
-
-  errmsg = "Use explicit invocations such as `$(f).(x)` to apply activation functions to tensors!"
-
-  quote
-    Base.@__doc__ $(f)(x::Real, $(a...)) = $body
-    $(f)(x::AbstractArray, $(a...)) = error($errmsg)
-  end |> esc
-end
-
 """
     σ(x) = 1 / (1 + exp(-x))

 Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation
 function.
 """
-@onlyreal σ(x) = one(x) / (one(x) + exp(-x))
+σ(x::Real) = one(x) / (one(x) + exp(-x))

 const sigmoid = σ

 # ForwardDiff numerical stability hack
-@onlyreal σ_stable(x) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
+σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))

 σ(x::Float32) = σ_stable(x)
@@ -42,7 +30,7 @@ Return `log(σ(x))` which is computed in a numerically stable way.
     -10.0
     -0.0
 """
-@onlyreal function logσ(x)
+function logσ(x::Real)
   max_v = max(zero(x), -x)
   z = exp(-max_v) + exp(-x-max_v)
   -(max_v + log(z))
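The `max_v = max(0, -x)` shift is the usual log-sum-exp rewrite: `log(1 + exp(-x)) = max_v + log(exp(-max_v) + exp(-x - max_v))`, and both exponents are ≤ 0 by construction, so neither term can overflow. A minimal illustration of what this buys, assuming the definitions above are loaded (`naive_logσ` is a hypothetical helper for comparison only, not part of this diff):

naive_logσ(x) = log(one(x) / (one(x) + exp(-x)))  # plain composition of log and σ

naive_logσ(-1000.0)  # -Inf: σ(-1000.0) underflows to 0.0 before the log
logσ(-1000.0)        # -1000.0: the shifted form stays finite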
@@ -56,7 +44,7 @@ const logsigmoid = logσ
 [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 """
-@onlyreal relu(x) = max(zero(x), x)
+relu(x::Real) = max(zero(x), x)


 """
@@ -66,7 +54,7 @@ Leaky [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 """
-@onlyreal leakyrelu(x, a = oftype(x/1, 0.01)) = max(a*x, x/1)
+leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/1)

 """
     elu(x, α = 1) =
@@ -76,15 +64,15 @@ Exponential Linear Unit activation function.
 See [Fast and Accurate Deep Network Learning by Exponential Linear Units](https://arxiv.org/abs/1511.07289).
 You can also specify the coefficient explicitly, e.g. `elu(x, 1)`.
 """
-@onlyreal elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
+elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))

 """
     gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))

 [Gaussian Error Linear Unit](https://arxiv.org/pdf/1606.08415.pdf)
 activation function.
 """
-@onlyreal function gelu(x)
+function gelu(x::Real)
   λ = oftype(x/1, √(2/π))
   α = oftype(x/1, 0.044715)
   h = oftype(x/1, 0.5)
@@ -98,7 +86,7 @@
 Self-gated activation function.
 See [Swish: a Self-Gated Activation Function](https://arxiv.org/pdf/1710.05941.pdf).
 """
-@onlyreal swish(x) = x * σ(x)
+swish(x::Real) = x * σ(x)

 """
     selu(x) = λ * (x ≥ 0 ? x : α * (exp(x) - 1))
@@ -109,7 +97,7 @@ See [Swish: a Self-Gated Activation Function](https://arxiv.org/pdf/1710.05941.pdf).
 Scaled exponential linear units.
 See [Self-Normalizing Neural Networks](https://arxiv.org/pdf/1706.02515.pdf).
 """
-@onlyreal function selu(x)
+function selu(x::Real)
   λ = oftype(x/1, 1.0507009873554804934193349852946)
   α = oftype(x/1, 1.6732632423543772848170429916717)
   λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
@@ -120,12 +108,18 @@ end

 See [Quadratic Polynomials Learn Better Image Features](http://www.iro.umontreal.ca/~lisa/publications2/index.php/attachments/single/205).
 """
-@onlyreal softsign(x) = x / (one(x) + abs(x))
+softsign(x::Real) = x / (one(x) + abs(x))


 """
     softplus(x) = log(exp(x) + 1)

 See [Deep Sparse Rectifier Neural Networks](http://proceedings.mlr.press/v15/glorot11a/glorot11a.pdf).
 """
-@onlyreal softplus(x) = log1p(exp(x))
+softplus(x::Real) = log1p(exp(x))
+
+# Provide an informative error message if activation functions are called with an array
+for f in (:σ, :σ_stable, :logσ, :relu, :leakyrelu, :elu, :gelu, :swish, :selu, :softsign, :softplus)
+  @eval $(f)(x::AbstractArray, args...) =
+    error("Use explicit invocations such as `", $(string(f)), ".(x)` to apply activation functions to tensors!")
+end
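With the macro gone, each activation carries an explicit `::Real` scalar method, and the `for` loop above reinstates the array guard the macro used to generate for every listed function. A rough usage sketch of the resulting behaviour, assuming this file is loaded (values shown are illustrative):

σ(0.0)           # 0.5 — scalar call hits the ::Real method
relu.(randn(3))  # broadcast explicitly to apply an activation elementwise
σ.(randn(3))     # likewise for the sigmoid

# Passing an array directly now fails with the informative message, e.g. σ(randn(3)):
# ERROR: Use explicit invocations such as `σ.(x)` to apply activation functions to tensors!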