
Commit d7cc49d

un-revert the removal of the active=true method
1 parent f42f475

File tree (2 files changed: +2, −13 lines)

src/deprecations.jl
test/layers/normalisation.jl

src/deprecations.jl

Lines changed: 0 additions & 11 deletions
@@ -186,17 +186,6 @@ function update!(opt::Optimise.AbstractOptimiser, ::Params, grads::Union{Tuple,
 end
 
 
-function dropout(rng, x, p; dims=:, active::Bool=true)
-  if active
-    NNlib.dropout(rng, x, p; dims)
-  else
-    Base.depwarn("Flux.dropout(...; active=false) is deprecated. Please branch outside the function, or call dropout(x, 0) if you must.", :dropout)
-    return x
-  end
-end
-dropout(x, p; kwargs...) = dropout(NNlib._rng_from_array(x), x, p; kwargs...)
-
-
 # v0.14 deprecations
 
 # Enable these when 0.14 is released, and delete const ClipGrad = Optimise.ClipValue etc:
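
For downstream code that passed the removed keyword, the deprecation message points the way: branch outside the call. A minimal sketch of that migration, assuming Flux.dropout now resolves to NNlib.dropout (as the updated tests below call it); maybe_dropout is a hypothetical helper name, not part of Flux:

using Flux

# Before (method now removed): y = Flux.dropout(x, 0.9; active = training)
# After: branch outside the call, as the deprecation warning advised.
function maybe_dropout(x, p, training::Bool; dims = :)
    training ? Flux.dropout(x, p; dims) : x  # the kept path forwards to NNlib.dropout
end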

test/layers/normalisation.jl

Lines changed: 2 additions & 2 deletions
@@ -56,10 +56,10 @@ evalwgrad(f, x...) = pullback(f, x...)[1]
   y = m(x)
   @test count(a->a == 0, y) > 50
 
-  y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=true)
+  y = Flux.dropout(values(rng_kwargs)..., x, 0.9) # , active=true)
   @test count(a->a == 0, y) > 50
 
-  y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=false)
+  y = Flux.dropout(values(rng_kwargs)..., x, 0.9 * 0) # , active=false)
   @test count(a->a == 0, y) == 0
 
   # CPU RNGs map onto CPU ok
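
The substitution works because p = 0 makes dropout an identity: the keep-mask passes every element and the 1/(1 - p) scale is 1, which is why 0.9 * 0 stands in for active=false above. A quick sketch of that check, assuming Flux.dropout forwards to NNlib.dropout as in these tests:

using Flux

x = rand(Float32, 100) .+ 0.1f0   # strictly positive, so any zero must come from dropout
y_on  = Flux.dropout(x, 0.9)      # active path: roughly 90% of entries zeroed
y_off = Flux.dropout(x, 0.9 * 0)  # p = 0: identity, the replacement for active=false
@assert count(iszero, y_on) > 50
@assert count(iszero, y_off) == 0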
