Skip to content

Commit 1b89825

Browse files
committed
Remove @inline annotations
since the compiler should do these automatically and they clutter the code
1 parent df4d73f commit 1b89825

File tree

1 file changed

+10
-10
lines changed

1 file changed

+10
-10
lines changed

src/lrp_rules.jl

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -26,14 +26,14 @@ end
2626
2727
Modify input activation before computing relevance propagation.
2828
"""
29-
@inline modify_input(rule, input) = input # general fallback
29+
modify_input(rule, input) = input # general fallback
3030

3131
"""
3232
modify_denominator(rule, d)
3333
3434
Modify denominator ``z`` for numerical stability on the forward pass.
3535
"""
36-
@inline modify_denominator(rule, d) = stabilize_denom(d, 1.0f-9) # general fallback
36+
modify_denominator(rule, d) = stabilize_denom(d, 1.0f-9) # general fallback
3737

3838
"""
3939
check_compat(rule, layer)
@@ -44,7 +44,7 @@ Check compatibility of a LRP-Rule with layer type.
4444
When implementing a custom `check_compat` function, return `nothing` if checks passed,
4545
otherwise throw an `ArgumentError`.
4646
"""
47-
@inline check_compat(rule, layer) = require_weight_and_bias(rule, layer)
47+
check_compat(rule, layer) = require_weight_and_bias(rule, layer)
4848

4949
"""
5050
modify_layer!(rule, layer)
@@ -69,16 +69,16 @@ end
6969
7070
Inplace-modify parameters before computing the relevance.
7171
"""
72-
@inline modify_param!(rule, param) = nothing # general fallback
72+
modify_param!(rule, param) = nothing # general fallback
7373

7474
# Useful presets:
7575
modify_param!(::Val{:mask_positive}, p) = p .= max.(zero(eltype(p)), p)
7676
modify_param!(::Val{:mask_negative}, p) = p .= min.(zero(eltype(p)), p)
7777

7878
# Internal wrapper functions for bias-free layers.
79-
@inline modify_bias!(rule::R, b) where {R} = modify_param!(rule, b)
80-
@inline modify_bias!(rule, b::Flux.Zeros) = nothing # skip if bias=Flux.Zeros (Flux <= v0.12)
81-
@inline function modify_bias!(rule, b::Bool) # skip if bias=false (Flux >= v0.13)
79+
modify_bias!(rule::R, b) where {R} = modify_param!(rule, b)
80+
modify_bias!(rule, b::Flux.Zeros) = nothing # skip if bias=Flux.Zeros (Flux <= v0.12)
81+
function modify_bias!(rule, b::Bool) # skip if bias=false (Flux >= v0.13)
8282
@assert b == false
8383
return nothing
8484
end
@@ -108,7 +108,7 @@ LRP-0 rule. Commonly used on upper layers.
108108
Layer-Wise Relevance Propagation
109109
"""
110110
struct ZeroRule <: AbstractLRPRule end
111-
@inline check_compat(::ZeroRule, layer) = nothing
111+
check_compat(::ZeroRule, layer) = nothing
112112

113113
# Optimization to save allocations since weights don't need to be reset:
114114
get_layer_resetter(::ZeroRule, layer) = Returns(nothing)
@@ -130,7 +130,7 @@ struct EpsilonRule{T} <: AbstractLRPRule
130130
EpsilonRule(ϵ=1.0f-6) = new{Float32}(ϵ)
131131
end
132132
modify_denominator(r::EpsilonRule, d) = stabilize_denom(d, r.ϵ)
133-
@inline check_compat(::EpsilonRule, layer) = nothing
133+
check_compat(::EpsilonRule, layer) = nothing
134134

135135
# Optimization to save allocations since weights don't need to be reset:
136136
get_layer_resetter(::EpsilonRule, layer) = Returns(nothing)
@@ -172,7 +172,7 @@ function lrp!(Rₖ, ::PassRule, layer, aₖ, Rₖ₊₁)
172172
return nothing
173173
end
174174
# No extra checks as reshaping operation will throw an error if layer isn't compatible:
175-
@inline check_compat(::PassRule, layer) = nothing
175+
check_compat(::PassRule, layer) = nothing
176176

177177
"""
178178
ZBoxRule(low, high)

0 commit comments

Comments (0)