v0.12
1 parent 26b800e commit 3cf3f8f
Project.toml
@@ -20,7 +20,7 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 [compat]
 ColorSchemes = "3.18"
 Distributions = "0.25"
-Flux = "0.12, 0.13"
+Flux = "0.13"
 ImageCore = "0.8, 0.9"
 ImageTransformations = "0.9"
 PrettyTables = "1"
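This compat bump drops Flux 0.12: under Pkg's semver rules the entry Flux = "0.13" admits versions in [0.13.0, 0.14.0), while the old entry also admitted 0.12.x. A sketch of resolving a matching version in a throwaway environment (nothing below is from the commit):

using Pkg
Pkg.activate(; temp=true)              # throwaway environment
Pkg.add(name="Flux", version="0.13")   # resolves to the latest 0.13.x release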
src/flux_types.jl
@@ -1,6 +1,6 @@
 ## Layer types
 """Union type for convolutional layers."""
-const ConvLayer = Union{Conv, ConvTranspose, CrossCor}
+const ConvLayer = Union{Conv,ConvTranspose,CrossCor}
 
 """Union type for dropout layers."""
 const DropoutLayer = Union{Dropout,typeof(Flux.dropout),AlphaDropout}
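The only change in this file is whitespace inside the Union. As a usage sketch, a union type like this lets one method cover all convolutional layers; the is_conv helper below is hypothetical, not part of the package:

using Flux

const ConvLayer = Union{Conv,ConvTranspose,CrossCor}

is_conv(::ConvLayer) = true   # one method for Conv, ConvTranspose and CrossCor
is_conv(::Any) = false

is_conv(Conv((3, 3), 1 => 2))  # true
is_conv(Dense(4 => 2))         # false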
src/flux_utils.jl
@@ -92,7 +92,9 @@ function preactivation(d::Dense, x::AbstractArray)
     return reshape(d(reshape(x, size(x, 1), :)), :, size(x)[2:end]...)
 end
 
 function preactivation(c::Conv, x)
-    cdims = Flux.conv_dims(c, x)
+    cdims = Flux.DenseConvDims(
+        x, c.weight; stride=c.stride, padding=c.pad, dilation=c.dilation, groups=c.groups
+    )
     return Flux.conv(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
@@ -101,7 +103,9 @@ function preactivation(c::ConvTranspose, x)
     return Flux.∇conv_data(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
 
 function preactivation(c::CrossCor, x)
-    cdims = Flux.crosscor_dims(c, x)
+    cdims = Flux.DenseConvDims(
+        x, c.weight; stride=c.stride, padding=c.pad, dilation=c.dilation
+    )
     return Flux.crosscor(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
 end
 
 function preactivation(l, x)
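Instead of calling the internal helpers Flux.conv_dims and Flux.crosscor_dims, the ConvDims are now constructed explicitly from the layer's fields. A minimal sanity check, assuming a default Conv layer with identity activation (the layer and input are illustrative, not from the commit):

using Flux

c = Conv((3, 3), 1 => 2)       # identity activation by default
x = rand(Float32, 8, 8, 1, 1)  # WHCN-ordered input

cdims = Flux.DenseConvDims(
    x, c.weight; stride=c.stride, padding=c.pad, dilation=c.dilation, groups=c.groups
)
y = Flux.conv(x, c.weight, cdims) .+ Flux.conv_reshape_bias(c)
y ≈ c(x)  # true: with identity activation, the preactivation is the layer output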
src/lrp/rules.jl
@@ -68,8 +68,7 @@ function modify_layer!(rule::R, layer::L; ignore_bias=false) where {R,L}
 
     # Checks that skip bias modification:
     ignore_bias && return nothing
-    isa(layer.bias, Flux.Zeros) && return nothing # skip if bias=Flux.Zeros (Flux <= v0.12)
-    isa(layer.bias, Bool) && !layer.bias && return nothing # skip if bias=false (Flux >= v0.13)
+    isa(layer.bias, Bool) && !layer.bias && return nothing
 
     modify_bias!(rule, layer.bias)
     return nothing
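With the Flux.Zeros branch gone, a disabled bias is detected purely through Flux 0.13's convention of storing the Bool false. A quick illustration (the Dense layer is an arbitrary example, not from the commit):

using Flux

d = Dense(2 => 2; bias=false)
isa(d.bias, Bool) && !d.bias  # true: nothing to modify, so modify_bias! is skipped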