Skip to content

Commit 484796c

Browse files
authored
fix various deprecation warnings (#2197)
* fix various deprecation warnings * added news entry
1 parent 479a094 commit 484796c

File tree

7 files changed

+18
-10
lines changed

7 files changed

+18
-10
lines changed

NEWS.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
# Flux Release Notes
22

3+
## v0.13.14
4+
* Fixed various deprecation warnings, from `Zygote.@nograd` and `Vararg`.
5+
36
## v0.13.13
47
* Added `f16` which changes precision to `Float16`, recursively.
58
* Initial support for AMDGPU via extension mechanism.

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
name = "Flux"
22
uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c"
3-
version = "0.13.13"
3+
version = "0.13.14"
44

55
[deps]
66
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"

src/Flux.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import Optimisers: Optimisers, trainable, destructure # before v0.13, Flux owne
1212
using Optimisers: freeze!, thaw!, adjust!
1313

1414
using Zygote, ChainRulesCore
15-
using Zygote: Params, @adjoint, gradient, pullback, @nograd
15+
using Zygote: Params, @adjoint, gradient, pullback
1616
using Zygote.ForwardDiff: value
1717
export gradient
1818

src/functor.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ end
9191
# Allows caching of the parameters when params is called within gradient() to fix #2040.
9292
# @non_differentiable params(m...) # https://github.com/FluxML/Flux.jl/pull/2054
9393
# That speeds up implicit use, and silently breaks explicit use.
94-
# From @macroexpand Zygote.@nograd params(m...) and https://github.com/FluxML/Zygote.jl/pull/1248
94+
# From @macroexpand Zygote.@non_differentiable params(m...) and https://github.com/FluxML/Zygote.jl/pull/1248
9595
Zygote._pullback(::Zygote.Context{true}, ::typeof(params), m...) = params(m), _ -> nothing
9696

9797
struct FluxCUDAAdaptor end

src/layers/basic.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ end
5050

5151
(c::Chain)(x) = _applychain(c.layers, x)
5252

53-
@generated function _applychain(layers::Tuple{Vararg{<:Any,N}}, x) where {N}
53+
@generated function _applychain(layers::Tuple{Vararg{Any,N}}, x) where {N}
5454
symbols = vcat(:x, [gensym() for _ in 1:N])
5555
calls = [:($(symbols[i+1]) = layers[$i]($(symbols[i]))) for i in 1:N]
5656
Expr(:block, calls...)
@@ -627,7 +627,7 @@ function (m::PairwiseFusion)(x::T) where {T}
627627
end
628628
(m::PairwiseFusion)(xs...) = m(xs)
629629

630-
@generated function applypairwisefusion(layers::Tuple{Vararg{<:Any,N}}, connection, x::T) where {N, T}
630+
@generated function applypairwisefusion(layers::Tuple{Vararg{Any,N}}, connection, x::T) where {N, T}
631631
y_symbols = [gensym() for _ in 1:(N + 1)]
632632
getinput(i) = T <: Tuple ? :(x[$i]) : :x
633633
calls = [:($(y_symbols[N + 1]) = $(getinput(1)))]

src/layers/show.jl

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,10 @@ function _big_show(io::IO, obj, indent::Int=0, name=nothing)
4545
end
4646

4747
_show_leaflike(x) = isleaf(x) # mostly follow Functors, except for:
48-
_show_leaflike(::Tuple{Vararg{<:Number}}) = true # e.g. stride of Conv
49-
_show_leaflike(::Tuple{Vararg{<:AbstractArray}}) = true # e.g. parameters of LSTMcell
48+
49+
# note the covariance of tuple, using <:T causes warning or error
50+
_show_leaflike(::Tuple{Vararg{Number}}) = true # e.g. stride of Conv
51+
_show_leaflike(::Tuple{Vararg{AbstractArray}}) = true # e.g. parameters of LSTMcell
5052
_show_leaflike(::Scale) = true # appears inside LayerNorm
5153
_show_leaflike(::AbstractArray{<:Number}) = true # e.g. transposed arrays
5254

src/utils.jl

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -625,14 +625,17 @@ true
625625
"""
626626
modules(m) = [x for x in Functors.fcollect(m) if !isleaflike(x)]
627627

628-
@nograd modules # TODO: is this correct? might fail with explicit parameters.
628+
@non_differentiable modules(::Any...) # TODO: is this correct? might fail with explicit parameters.
629629
function ChainRulesCore.rrule(::typeof(modules), m)
630630
modules(m), dm -> error("Flux.modules is not at present differentiable, sorry")
631631
end
632632

633633
isleaflike(x) = Functors.isleaf(x)
634-
isleaflike(::Tuple{Vararg{<:Number}}) = true
635-
isleaflike(::Tuple{Vararg{<:AbstractArray{<:Number}}}) = true
634+
635+
# these are, essentially, Tuple{Vararg{<:T}} using the special property
636+
# of tuples that they are type covariant. Using <: here causes warning or error
637+
isleaflike(::Tuple{Vararg{Number}}) = true
638+
isleaflike(::Tuple{Vararg{AbstractArray{<:Number}}}) = true
636639

637640
"""
638641
patience(predicate, wait)

0 commit comments

Comments
 (0)