Commit e82c566

Update XAIBase dependency to v4, drop support for Julia <1.10 (#19)
* Remove support for Julia <1.10
* Update XAIBase to v4
* Drop Flux `v0.13`
1 parent 22051fd commit e82c566
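For context: with XAIBase v4, analyzers implement `call_analyzer(input, analyzer, output_selector)` instead of making the analyzer struct itself callable, and the `Explanation` constructor now receives the analyzed input as its second positional argument (see the diffs below). A minimal sketch of the unchanged user-facing workflow, assuming the `analyze` entry point reexported from XAIBase; the toy model and input are illustrative only:

```julia
using Flux, RelevancePropagation

# Toy model and input, for illustration only
model = Chain(Dense(10 => 5, relu), Dense(5 => 2))
input = rand(Float32, 10, 1)

analyzer = LRP(model)
expl = analyze(input, analyzer)  # dispatches internally to call_analyzer(input, analyzer, selector)
expl.val                         # relevance attribution with the same shape as the input
```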

7 files changed: +14 additions, −26 deletions


Project.toml

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 name = "RelevancePropagation"
 uuid = "0be6dd02-ae9e-43eb-b318-c6e81d6890d8"
 authors = ["Adrian Hill <[email protected]>"]
-version = "2.0.3-DEV"
+version = "3.0.0-DEV"
 
 [deps]
 Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
@@ -14,12 +14,12 @@ XAIBase = "9b48221d-a747-4c1b-9860-46a1d8ba24a7"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
-Flux = "0.13, 0.14"
+Flux = "0.14"
 MacroTools = "0.5"
 Markdown = "1"
 Random = "1"
 Reexport = "1"
 Statistics = "1"
-XAIBase = "3"
+XAIBase = "4"
 Zygote = "0.6"
-julia = "1.6"
+julia = "1.10"

README.md

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ This package is part of the [Julia-XAI ecosystem](https://github.com/Julia-XAI)
 [ExplainableAI.jl](https://github.com/Julia-XAI/ExplainableAI.jl).
 
 ## Installation
-This package supports Julia ≥1.6. To install it, open the Julia REPL and run
+This package supports Julia ≥1.10. To install it, open the Julia REPL and run
 ```julia-repl
 julia> ]add RelevancePropagation
 ```

src/RelevancePropagation.jl

Lines changed: 1 addition & 1 deletion
@@ -2,6 +2,7 @@ module RelevancePropagation
 
 using Reexport
 @reexport using XAIBase
+import XAIBase: call_analyzer
 
 using XAIBase: AbstractFeatureSelector, number_of_features
 using Base.Iterators
@@ -12,7 +13,6 @@ using Zygote
 using Markdown
 using Statistics: mean, std
 
-include("compat.jl")
 include("bibliography.jl")
 include("layer_types.jl")
 include("layer_utils.jl")

src/compat.jl

Lines changed: 0 additions & 6 deletions
This file was deleted.

src/crp.jl

Lines changed: 4 additions & 2 deletions
@@ -32,7 +32,9 @@ end
 # Call to CRP analyzer #
 #======================#
 
-function (crp::CRP)(input::AbstractArray{T,N}, ns::AbstractOutputSelector) where {T,N}
+function call_analyzer(
+    input::AbstractArray{T,N}, crp::CRP, ns::AbstractOutputSelector
+) where {T,N}
     rules = crp.lrp.rules
     layers = crp.lrp.model.layers
     modified_layers = crp.lrp.modified_layers
@@ -88,5 +90,5 @@ function (crp::CRP)(input::AbstractArray{T,N}, ns::AbstractOutputSelector) where
             end
         end
     end
-    return Explanation(R_return, last(as), ns(last(as)), :CRP, :attribution, nothing)
+    return Explanation(R_return, input, last(as), ns(last(as)), :CRP, :attribution, nothing)
 end
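The CRP analyzer now follows the same convention as LRP below: the input comes first, the analyzer second, and the input is also forwarded to `Explanation`. As a rough sketch of that XAIBase v4 interface, here is a hypothetical custom analyzer mirroring the argument order used in this diff (relevances, input, output, selected output, analyzer name, heatmap kind, extras); the `IdentityAnalyzer` type is invented purely for illustration:

```julia
using RelevancePropagation  # reexports XAIBase (Explanation, AbstractOutputSelector, ...)
import XAIBase: call_analyzer

# Hypothetical analyzer that simply returns the input as its own "explanation"
struct IdentityAnalyzer{M}
    model::M
end

function call_analyzer(input::AbstractArray, a::IdentityAnalyzer, ns::AbstractOutputSelector)
    output = a.model(input)
    return Explanation(input, input, output, ns(output), :Identity, :attribution, nothing)
end
```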

src/lrp.jl

Lines changed: 3 additions & 3 deletions
@@ -55,16 +55,16 @@ LRP(model::Chain, c::Composite; kwargs...) = LRP(model, lrp_rules(model, c); kwa
 # Call to the LRP analyzer #
 #==========================#
 
-function (lrp::LRP)(
-    input::AbstractArray, ns::AbstractOutputSelector; layerwise_relevances=false
+function call_analyzer(
+    input::AbstractArray, lrp::LRP, ns::AbstractOutputSelector; layerwise_relevances=false
 )
     as = get_activations(lrp.model, input) # compute activations aᵏ for all layers k
     Rs = similar.(as) # allocate relevances Rᵏ for all layers k
     mask_output_neuron!(Rs[end], as[end], ns) # compute relevance Rᴺ of output layer N
 
     lrp_backward_pass!(Rs, as, lrp.rules, lrp.model, lrp.modified_layers)
     extras = layerwise_relevances ? (layerwise_relevances=Rs,) : nothing
-    return Explanation(first(Rs), last(as), ns(last(as)), :LRP, :attribution, extras)
+    return Explanation(first(Rs), input, last(as), ns(last(as)), :LRP, :attribution, extras)
 end
 
 get_activations(model, input) = (input, Flux.activations(model, input)...)
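The unchanged `get_activations` line shows where the input enters the pipeline: it is prepended to the tuple of per-layer outputs returned by `Flux.activations`, and now also travels into the returned `Explanation`. A small standalone sketch of that activations tuple, with a toy model chosen for illustration:

```julia
using Flux

model = Chain(Dense(3 => 2, relu), Dense(2 => 1))  # toy model
x = rand(Float32, 3, 1)

# Flux.activations returns the output of every layer of the Chain;
# prepending the input reproduces the `as` tuple used by the LRP backward pass.
as = (x, Flux.activations(model, x)...)
length(as)      # 3: input, hidden activation, model output
size(last(as))  # (1, 1) — the model output that is stored in the Explanation
```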

test/test_batches.jl

Lines changed: 1 addition & 9 deletions
@@ -30,20 +30,12 @@ ANALYZERS = Dict(
 
 for (name, method) in ANALYZERS
     @testset "$name" begin
-        # Using `add_batch_dim=true` should result in same explanation
-        # as input reshaped to have a batch dimension
-        analyzer = method(model)
-        expl1_no_bd = analyzer(input1_no_bd; add_batch_dim=true)
-        analyzer = method(model)
-        expl1_bd = analyzer(input1_bd)
-        @test expl1_bd.val ≈ expl1_no_bd.val
-
         # Analyzing a batch should have the same result
         # as analyzing inputs in batch individually
         analyzer = method(model)
         expl2_bd = analyzer(input2_bd)
         analyzer = method(model)
         expl_batch = analyzer(input_batch)
-        @test expl1_bd.val ≈ expl_batch.val[:, 1]
+        @test expl2_bd.val ≈ expl_batch.val[:, 2]
     end
 end
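The remaining assertion checks batch consistency: analyzing a whole batch must reproduce the result of analyzing each input on its own. A self-contained sketch of that pattern, with a hypothetical toy model and fresh analyzers per call, as in the test above:

```julia
using Flux, RelevancePropagation, Test

model = Chain(Dense(8 => 4, relu), Dense(4 => 2))          # toy model, for illustration only
input2_bd = rand(Float32, 8, 1)                            # single input with a batch dimension
input_batch = cat(rand(Float32, 8, 1), input2_bd; dims=2)  # batch of two inputs, input2_bd is column 2

analyzer = LRP(model)
expl2_bd = analyzer(input2_bd)      # analyze the single input
analyzer = LRP(model)               # fresh analyzer, mirroring the test
expl_batch = analyzer(input_batch)  # analyze the whole batch
@test expl2_bd.val ≈ expl_batch.val[:, 2]  # column 2 matches the single-input result
```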
