
Commit 84acf9d

Merge branch 'master' into onlyreal

2 parents c07c12c + ed4fe9a

35 files changed: +3750 -1717 lines

.travis.yml

Lines changed: 4 additions & 1 deletion
@@ -4,7 +4,6 @@ os:
   - linux
   - osx
 julia:
-  - 0.7
   - 1.0
   - 1.1
   - nightly
@@ -15,6 +14,10 @@ notifications:
   email: false
 git:
   depth: 99999999
+env:
+  # Disable test fuzzing for the moment, as we're a little too slow for Travis
+  - NNLIB_TEST_FUZZING=false
+
 # Submit to Codecov
 after_success:
   - if [[ $TRAVIS_JULIA_VERSION = 1.1 ]] && [[ $TRAVIS_OS_NAME = linux ]]; then
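Note that the new env entry only has an effect if the test suite checks it; Travis merely exports the variable. A minimal sketch of how such a flag is typically consumed from test/runtests.jl — the guard name and default below are assumptions, not necessarily NNlib's exact code:

    using Test

    # Hypothetical guard: run the slow randomized ("fuzzing") tests only when
    # the environment variable is not explicitly set to "false".
    const TEST_FUZZING = get(ENV, "NNLIB_TEST_FUZZING", "true") == "true"

    if TEST_FUZZING
        @testset "fuzzing" begin
            # randomized convolution / pooling inputs would be exercised here
        end
    end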

Manifest.toml

Lines changed: 27 additions & 0 deletions
@@ -3,6 +3,12 @@
 [[Base64]]
 uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
 
+[[Crayons]]
+deps = ["Test"]
+git-tree-sha1 = "f621b8ef51fd2004c7cf157ea47f027fdeac5523"
+uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
+version = "4.0.0"
+
 [[Distributed]]
 deps = ["Random", "Serialization", "Sockets"]
 uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
@@ -25,6 +31,10 @@ uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
 deps = ["Base64"]
 uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
 
+[[Printf]]
+deps = ["Unicode"]
+uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
+
 [[Random]]
 deps = ["Serialization"]
 uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
@@ -41,6 +51,23 @@ uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
 [[Sockets]]
 uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
 
+[[SparseArrays]]
+deps = ["LinearAlgebra", "Random"]
+uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
+
+[[Statistics]]
+deps = ["LinearAlgebra", "SparseArrays"]
+uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
+
 [[Test]]
 deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
 uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[[TimerOutputs]]
+deps = ["Crayons", "Printf", "Test", "Unicode"]
+git-tree-sha1 = "b80671c06f8f8bae08c55d67b5ce292c5ae2660c"
+uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
+version = "0.5.0"
+
+[[Unicode]]
+uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"

Project.toml

Lines changed: 3 additions & 0 deletions
@@ -1,10 +1,13 @@
 name = "NNlib"
 uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
+version = "0.6.0"
 
 [deps]
 Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
+Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
+TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
 
 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

REQUIRE

Lines changed: 2 additions & 1 deletion
@@ -1,2 +1,3 @@
-julia 0.7-
+julia 1.0
 Requires
+TimerOutputs

src/NNlib.jl

Lines changed: 20 additions & 9 deletions
@@ -1,16 +1,27 @@
 module NNlib
+using Requires, TimerOutputs
 
-using Requires, Libdl
+const to = TimerOutput()
 
-export σ, sigmoid, relu, leakyrelu, elu, gelu, swish, selu, softplus, softsign, logσ, logsigmoid,
-       softmax, logsoftmax, maxpool, meanpool
-
-include("numeric.jl")
+# Include APIs
+include("dim_helpers.jl")
 include("activation.jl")
 include("softmax.jl")
-include("logsoftmax.jl")
-include("linalg.jl")
+include("gemm.jl")
 include("conv.jl")
-include("cubroadcast.jl")
+include("pooling.jl")
+
+## Include implementations
+include("impl/padding_edges.jl")
+
+# Direct implementations of convolutional and depthwise-convolutional algorithms
+include("impl/conv_direct.jl")
+include("impl/depthwiseconv_direct.jl")
+# im2col implementations of convolutional and depthwise-convolutional algorithms
+include("impl/conv_im2col.jl")
+include("impl/depthwiseconv_im2col.jl")
+
+# Direct implementations of pooling
+include("impl/pooling_direct.jl")
 
-end # module
+end # module NNlib
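The `const to = TimerOutput()` added above is a package-global timer from TimerOutputs.jl. A minimal sketch of how such a timer accumulates timings via `@timeit` — the function and label below are illustrative, not NNlib code:

    using TimerOutputs

    const to = TimerOutput()

    # Wrapping an expression in @timeit records its runtime and call count
    # under the given label while still returning the expression's value.
    function timed_square(x)
        @timeit to "square" x .^ 2
    end

    timed_square(rand(Float32, 128, 128))
    print_timer(to)   # print the accumulated timing table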

src/activation.jl

Lines changed: 13 additions & 13 deletions
@@ -1,43 +1,40 @@
+export σ, sigmoid, relu, leakyrelu, elu, gelu, swish, selu, softplus, softsign, logσ,
+       logsigmoid
+
 """
     σ(x) = 1 / (1 + exp(-x))
 
 Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation
 function.
 """
 σ(x::Real) = one(x) / (one(x) + exp(-x))
-
 const sigmoid = σ
 
 # ForwardDiff numerical stability hack
 σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
-
 σ(x::Float32) = σ_stable(x)
-
 @init @require ForwardDiff="f6369f11-7733-5829-9624-2563aa707210" begin
   σ(x::ForwardDiff.Dual{T,Float32}) where T = σ_stable(x)
 end
 
+
 """
     logσ(x)
 
 Return `log(σ(x))` which is computed in a numerically stable way.
 
-    julia> logσ(0.)
+    julia> logσ(0)
     -0.6931471805599453
-    julia> logσ.([-100, -10, 100.])
+    julia> logσ.([-100, -10, 100])
     3-element Array{Float64,1}:
      -100.0
-      -10.0
-       -0.0
+      -10.000045398899218
+       -3.720075976020836e-44
 """
-function logσ(x::Real)
-  max_v = max(zero(x), -x)
-  z = exp(-max_v) + exp(-x-max_v)
-  -(max_v + log(z))
-end
-
+logσ(x::Real) = -softplus(-x)
 const logsigmoid = logσ
 
+
 """
     relu(x) = max(0, x)
 
@@ -56,6 +53,7 @@ You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 """
 leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/1)
 
+
 """
     elu(x, α = 1) =
       x > 0 ? x : α * (exp(x) - 1)
@@ -66,6 +64,7 @@ You can also specify the coefficient explicitly, e.g. `elu(x, 1)`.
 """
 elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
 
+
 """
     gelu(x) = 0.5x*(1 + tanh(√(2/π)*(x + 0.044715x^3)))
 
@@ -103,6 +102,7 @@ function selu(x::Real)
   λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
 end
 
+
 """
     softsign(x) = x / (1 + |x|)
 
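The rewrite of `logσ` above leans on `softplus` for numerical stability, and the docstring output was updated to the values the function actually returns. A small self-contained check of why the stable form matters — the `softplus` definition here is the usual stable formulation and is only an assumption about NNlib's own implementation:

    # Stable softplus: log(1 + exp(x)) without overflowing exp for large x.
    stable_softplus(x) = x > 0 ? x + log1p(exp(-x)) : log1p(exp(x))

    logσ_stable(x) = -stable_softplus(-x)      # the identity used in the new logσ
    logσ_naive(x)  = log(1 / (1 + exp(-x)))    # direct translation of log(σ(x))

    logσ_naive(-800.0)    # -Inf: exp(800) overflows, so σ underflows to 0
    logσ_stable(-800.0)   # -800.0
    logσ_stable(-100.0)   # -100.0, matching the updated docstring example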