Commit 1c35815

matsueushi authored and staticfloat committed
Preserve the type in differentiation (#149)
1 parent aec4697 commit 1c35815

1 file changed: +3 -3 lines changed
src/activation.jl

Lines changed: 3 additions & 3 deletions
@@ -51,7 +51,7 @@ Leaky [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 """
-leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/1)
+leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/one(x))
 
 
 """
@@ -62,7 +62,7 @@ Exponential Linear Unit activation function.
 See [Fast and Accurate Deep Network Learning by Exponential Linear Units](https://arxiv.org/abs/1511.07289).
 You can also specify the coefficient explicitly, e.g. `elu(x, 1)`.
 """
-elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
+elu(x, α = one(x)) = ifelse(x ≥ 0, x/one(x), α * (exp(x) - one(x)))
 
 
 """
@@ -99,7 +99,7 @@ See [Self-Normalizing Neural Networks](https://arxiv.org/pdf/1706.02515.pdf).
 function selu(x::Real)
   λ = oftype(x/1, 1.0507009873554804934193349852946)
   α = oftype(x/1, 1.6732632423543772848170429916717)
-  λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
+  λ * ifelse(x > 0, x/one(x), α * (exp(x) - one(x)))
 end
 
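As an illustration of the change (not part of the commit), the sketch below redefines the touched functions as they read after this diff and checks, using ForwardDiff as a stand-in AD backend, that a Float32 input yields Float32 values and Float32 derivatives. Whether the previous x/1 form actually widened the type depends on the differentiation backend used by Flux, so treat this as an assumed-setup check rather than the PR's own test.

using ForwardDiff

# Definitions copied from the right-hand side of this diff.
leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/one(x))

function selu(x::Real)
  λ = oftype(x/1, 1.0507009873554804934193349852946)
  α = oftype(x/1, 1.6732632423543772848170429916717)
  λ * ifelse(x > 0, x/one(x), α * (exp(x) - one(x)))
end

x = -0.3f0                                        # Float32 input
@show typeof(leakyrelu(x))                        # Float32: value type preserved
@show typeof(ForwardDiff.derivative(leakyrelu, x))  # Float32: derivative type preserved
@show typeof(ForwardDiff.derivative(selu, x))       # Float32: derivative type preserved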