# src/layers/upsample.jl — included from src/Flux.jl after layers/normalise.jl.

"""
    BilinearUpsample(factor::Tuple{Integer,Integer})

Create an upsampling layer that uses bilinear interpolation.

For a 4-D `WHCN` input (width, height, channels, batch), the width and
height dimensions grow by `factor[1]` and `factor[2]` respectively; the
channel and batch dimensions are unchanged. Sampling positions follow the
half-pixel-centre convention (source position `(i - 0.5)/factor + 0.5`),
with edge values clamped.

# Examples
```jldoctest
julia> b = Flux.BilinearUpsample((2, 2))
BilinearUpsample(2, 2)

julia> size(b(reshape(Float64.(1:4), 2, 2, 1, 1)))
(4, 4, 1, 1)
```
"""
struct BilinearUpsample{T<:Integer}
    factor::Tuple{T,T}
end

function (b::BilinearUpsample)(x::AbstractArray)
    # Destructuring the size tuple requires at least 4 dimensions; a
    # smaller input throws BoundsError (behavior relied on by the tests).
    W, H, C, N = size(x)

    newW = W * b.factor[1]
    newH = H * b.factor[2]

    out = similar(x, (newW, newH, C, N))

    @inbounds for w = 1:newW, h = 1:newH
        # Source coordinates in the input grid (half-pixel centres).
        w₀ = (w - 0.5) / b.factor[1] + 0.5
        h₀ = (h - 0.5) / b.factor[2] + 0.5

        # Bracketing integer coordinates; w2 = w1 + 1 and h2 = h1 + 1 by
        # construction, so the bilinear weights below already sum to 1 and
        # no normalising division is needed.
        w1 = floor(Int, w₀)
        w2 = w1 + 1
        h1 = floor(Int, h₀)
        h2 = h1 + 1

        # Clamp only the array indices (not the weights) so edges replicate.
        i1 = clamp(w1, 1, W)
        i2 = clamp(w2, 1, W)
        j1 = clamp(h1, 1, H)
        j2 = clamp(h2, 1, H)

        # Interpolation weights, computed once per output pixel.
        wλ1 = w2 - w₀   # pull toward i1 along width
        wλ2 = w₀ - w1   # pull toward i2 along width
        hλ1 = h2 - h₀   # pull toward j1 along height
        hλ2 = h₀ - h1   # pull toward j2 along height

        @views out[w, h, :, :] =
            x[i1, j1, :, :] * (wλ1 * hλ1) +
            x[i1, j2, :, :] * (wλ1 * hλ2) +
            x[i2, j1, :, :] * (wλ2 * hλ1) +
            x[i2, j2, :, :] * (wλ2 * hλ2)
    end

    out
end

# Compact display, e.g. `BilinearUpsample(2, 2)`.
function Base.show(io::IO, b::BilinearUpsample)
    print(io, "BilinearUpsample(", b.factor[1], ", ", b.factor[2], ")")
end
# test/layers/upsample.jl — include("layers/upsample.jl") goes in the
# layers @testset of test/runtests.jl, after include("layers/conv.jl").

using Flux: BilinearUpsample
using Test

@testset "BilinearUpsample" begin
    # Spatial dimensions scale by the factor; channel and batch
    # dimensions pass through untouched.
    cases = [
        ((2, 2), (2, 2, 1, 1), (4, 4, 1, 1)),
        ((3, 3), (2, 2, 1, 1), (6, 6, 1, 1)),
        ((2, 2), (2, 2, 10, 10), (4, 4, 10, 10)),
        ((3, 3), (2, 2, 10, 10), (6, 6, 10, 10)),
    ]
    for (factor, insize, outsize) in cases
        @test size(BilinearUpsample(factor)(rand(insize...))) == outsize
    end

    # A 2-D input cannot be destructured into W, H, C, N.
    @test_throws BoundsError BilinearUpsample((2, 2))(rand(2, 2))
end