
Commit 3cf3f8f

Drop Flux v0.12 compatibility (#99)
* Drop Flux `v0.12` compatibility
* Fix for Flux `v0.13.0`
1 parent 26b800e commit 3cf3f8f

File tree

4 files changed: +9 -6 lines changed


Project.toml

Lines changed: 1 addition & 1 deletion
```diff
@@ -20,7 +20,7 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 [compat]
 ColorSchemes = "3.18"
 Distributions = "0.25"
-Flux = "0.12, 0.13"
+Flux = "0.13"
 ImageCore = "0.8, 0.9"
 ImageTransformations = "0.9"
 PrettyTables = "1"
```

src/flux_types.jl

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,6 +1,6 @@
 ## Layer types
 """Union type for convolutional layers."""
-const ConvLayer = Union{Conv, ConvTranspose, CrossCor}
+const ConvLayer = Union{Conv,ConvTranspose,CrossCor}
 
 """Union type for dropout layers."""
 const DropoutLayer = Union{Dropout,typeof(Flux.dropout),AlphaDropout}
```
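The `ConvLayer` change is purely cosmetic (spacing inside the `Union`). For reference, a small illustrative sketch of why such a union type is convenient; the helper name `is_conv_layer` is hypothetical and not part of this package:

```julia
# Illustrative only: a union type like ConvLayer lets one method cover
# Conv, ConvTranspose, and CrossCor at once.
using Flux

const ConvLayer = Union{Conv,ConvTranspose,CrossCor}

is_conv_layer(::ConvLayer) = true   # hypothetical helper for demonstration
is_conv_layer(::Any) = false

is_conv_layer(Conv((3, 3), 1 => 1))   # true
is_conv_layer(Dense(2 => 2))          # false
```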

src/flux_utils.jl

Lines changed: 6 additions & 2 deletions
```diff
@@ -92,7 +92,9 @@ function preactivation(d::Dense, x::AbstractArray)
     return reshape(d(reshape(x, size(x, 1), :)), :, size(x)[2:end]...)
 end
 function preactivation(c::Conv, x)
-    cdims = Flux.conv_dims(c, x)
+    cdims = Flux.DenseConvDims(
+        x, c.weight; stride=c.stride, padding=c.pad, dilation=c.dilation, groups=c.groups
+    )
     return Flux.conv(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
 
@@ -101,7 +103,9 @@ function preactivation(c::ConvTranspose, x)
     return Flux.∇conv_data(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
 function preactivation(c::CrossCor, x)
-    cdims = Flux.crosscor_dims(c, x)
+    cdims = Flux.DenseConvDims(
+        x, c.weight; stride=c.stride, padding=c.pad, dilation=c.dilation
+    )
     return Flux.crosscor(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
 function preactivation(l, x)
```
src/lrp/rules.jl

Lines changed: 1 addition & 2 deletions
```diff
@@ -68,8 +68,7 @@ function modify_layer!(rule::R, layer::L; ignore_bias=false) where {R,L}
 
     # Checks that skip bias modification:
     ignore_bias && return nothing
-    isa(layer.bias, Flux.Zeros) && return nothing # skip if bias=Flux.Zeros (Flux <= v0.12)
-    isa(layer.bias, Bool) && !layer.bias && return nothing # skip if bias=false (Flux >= v0.13)
+    isa(layer.bias, Bool) && !layer.bias && return nothing
 
     modify_bias!(rule, layer.bias)
     return nothing
```