
Commit ec1ef19

Remove onlyreal macro
1 parent 6131a17 commit ec1ef19

4 files changed, +17 -27 lines


Project.toml

Lines changed: 0 additions & 1 deletion

@@ -4,6 +4,5 @@ uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
 [deps]
 Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
-MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

REQUIRE

Lines changed: 0 additions & 1 deletion

@@ -1,3 +1,2 @@
 julia 0.7-
 Requires
-MacroTools

src/NNlib.jl

Lines changed: 0 additions & 2 deletions

@@ -2,8 +2,6 @@ module NNlib

 using Requires, Libdl

-using MacroTools: @capture
-
 export σ, sigmoid, relu, leakyrelu, elu, gelu, swish, selu, softplus, softsign, logσ, logsigmoid,
   softmax, logsoftmax, maxpool, meanpool

src/activation.jl

Lines changed: 17 additions & 23 deletions

@@ -1,27 +1,15 @@
-macro onlyreal(ex)
-  @capture(ex, (f_(x, a__) = body_) | (function f_(x, a__) body_ end)) ||
-    error("expected a function with initial argument `x`")
-
-  errmsg = "Use explicit invocations such as `$(f).(x)` to apply activation functions to tensors!"
-
-  quote
-    Base.@__doc__ $(f)(x::Real, $(a...)) = $body
-    $(f)(x::AbstractArray, $(a...)) = error($errmsg)
-  end |> esc
-end
-
 """
     σ(x) = 1 / (1 + exp(-x))

 Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation
 function.
 """
-@onlyreal σ(x) = one(x) / (one(x) + exp(-x))
+σ(x::Real) = one(x) / (one(x) + exp(-x))

 const sigmoid = σ

 # ForwardDiff numerical stability hack
-@onlyreal σ_stable(x) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
+σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))

 σ(x::Float32) = σ_stable(x)

@@ -42,7 +30,7 @@ Return `log(σ(x))` which is computed in a numerically stable way.
 -10.0
 -0.0
 """
-@onlyreal function logσ(x)
+function logσ(x::Real)
   max_v = max(zero(x), -x)
   z = exp(-max_v) + exp(-x-max_v)
   -(max_v + log(z))
@@ -56,7 +44,7 @@ const logsigmoid = logσ
 [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 """
-@onlyreal relu(x) = max(zero(x), x)
+relu(x::Real) = max(zero(x), x)


 """
@@ -66,7 +54,7 @@ Leaky [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_ne
 activation function.
 You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 """
-@onlyreal leakyrelu(x, a = oftype(x/1, 0.01)) = max(a*x, x/1)
+leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/1)

 """
     elu(x, α = 1) =
@@ -76,15 +64,15 @@ Exponential Linear Unit activation function.
 See [Fast and Accurate Deep Network Learning by Exponential Linear Units](https://arxiv.org/abs/1511.07289).
 You can also specify the coefficient explicitly, e.g. `elu(x, 1)`.
 """
-@onlyreal elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
+elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))

 """
     gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))

 [Gaussian Error Linear Unit](https://arxiv.org/pdf/1606.08415.pdf)
 activation function.
 """
-@onlyreal function gelu(x)
+function gelu(x::Real)
   λ = oftype(x/1, √(2/π))
   α = oftype(x/1, 0.044715)
   h = oftype(x/1, 0.5)
@@ -98,7 +86,7 @@ end
 Self-gated actvation function.
 See [Swish: a Self-Gated Activation Function](https://arxiv.org/pdf/1710.05941.pdf).
 """
-@onlyreal swish(x) = x * σ(x)
+swish(x::Real) = x * σ(x)

 """
     selu(x) = λ * (x ≥ 0 ? x : α * (exp(x) - 1))
@@ -109,7 +97,7 @@ See [Swish: a Self-Gated Activation Function](https://arxiv.org/pdf/1710.05941.p
 Scaled exponential linear units.
 See [Self-Normalizing Neural Networks](https://arxiv.org/pdf/1706.02515.pdf).
 """
-@onlyreal function selu(x)
+function selu(x::Real)
   λ = oftype(x/1, 1.0507009873554804934193349852946)
   α = oftype(x/1, 1.6732632423543772848170429916717)
   λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
@@ -120,12 +108,18 @@ end

 See [Quadratic Polynomials Learn Better Image Features](http://www.iro.umontreal.ca/~lisa/publications2/index.php/attachments/single/205).
 """
-@onlyreal softsign(x) = x / (one(x) + abs(x))
+softsign(x::Real) = x / (one(x) + abs(x))


 """
     softplus(x) = log(exp(x) + 1)

 See [Deep Sparse Rectifier Neural Networks](http://proceedings.mlr.press/v15/glorot11a/glorot11a.pdf).
 """
-@onlyreal softplus(x) = log1p(exp(x))
+softplus(x::Real) = log1p(exp(x))
+
+# Provide an informative error message if activation functions are called with an array
+for f in (:σ, :σ_stable, :logσ, :relu, :leakyrelu, :elu, :gelu, :swish, :selu, :softsign, :softplus)
+  @eval $(f)(x::AbstractArray, args...) =
+    error("Use explicit invocations such as `", $(string(f)), ".(x)` to apply activation functions to tensors!")
+end
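
For reference, a minimal sketch of the behavior after this commit, assuming NNlib is loaded in a Julia 0.7+ session (the REPL output below is illustrative, not taken from the repository): scalar calls dispatch on the new `::Real` methods, an array argument hits the generated error method, and broadcasting remains the supported way to apply an activation to a tensor.

julia> using NNlib

julia> relu(-0.3)            # scalar method, now annotated ::Real
0.0

julia> relu([1.0, -1.0])     # array argument triggers the @eval-generated error method
ERROR: Use explicit invocations such as `relu.(x)` to apply activation functions to tensors!

julia> relu.([1.0, -1.0])    # broadcast the scalar method over the tensor instead
2-element Array{Float64,1}:
 1.0
 0.0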
