Skip to content

Commit 53a3ded

Browse files
authored
Merge pull request #171 from JuliaDiff/ox/broken_reverse_tests
Mark broken reverse mode tests broken
2 parents 759f58d + d820497 commit 53a3ded

File tree

3 files changed

+90
-74
lines changed

3 files changed

+90
-74
lines changed

test/regression.jl

Lines changed: 76 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -13,18 +13,18 @@ const bwd = Diffractor.PrimeDerivativeBack
1313

1414

1515
# Regression tests
16-
@test gradient(x -> sum(abs2, x .+ 1.0), zeros(3))[1] == [2.0, 2.0, 2.0]
16+
@test gradient(x -> sum(abs2, x .+ 1.0), zeros(3))[1] == [2.0, 2.0, 2.0] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
1717

1818
function f_broadcast(a)
1919
l = a / 2.0 * [[0. 1. 1.]; [1. 0. 1.]; [1. 1. 0.]]
2020
return sum(l)
2121
end
22-
@test fwd(f_broadcast)(1.0) == bwd(f_broadcast)(1.0)
22+
@test fwd(f_broadcast)(1.0) == bwd(f_broadcast)(1.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
2323

2424
# Make sure that there's no infinite recursion in kwarg calls
2525
g_kw(;x=1.0) = sin(x)
2626
f_kw(x) = g_kw(;x)
27-
@test bwd(f_kw)(1.0) == bwd(sin)(1.0)
27+
@test bwd(f_kw)(1.0) == bwd(sin)(1.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
2828

2929
function f_crit_edge(a, b, c, x)
3030
# A function with two critical edges. This used to trigger an issue where
@@ -43,95 +43,111 @@ function f_crit_edge(a, b, c, x)
4343

4444
return y
4545
end
46-
@test bwd(x->f_crit_edge(false, false, false, x))(1.0) == 1.0
47-
@test bwd(x->f_crit_edge(true, true, false, x))(1.0) == 2.0
48-
@test bwd(x->f_crit_edge(false, true, true, x))(1.0) == 12.0
49-
@test bwd(x->f_crit_edge(false, false, true, x))(1.0) == 4.0
46+
@test bwd(x->f_crit_edge(false, false, false, x))(1.0) == 1.0 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
47+
@test bwd(x->f_crit_edge(true, true, false, x))(1.0) == 2.0 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
48+
@test bwd(x->f_crit_edge(false, true, true, x))(1.0) == 12.0 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
49+
@test bwd(x->f_crit_edge(false, false, true, x))(1.0) == 4.0 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
5050

5151
# Issue #27 - Mixup in lifting of getfield
5252
let var"'" = bwd
53-
@test (x->x^5)''(1.0) == 20.
53+
@test (x->x^5)''(1.0) == 20. broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
5454
@test_broken (x->x^5)'''(1.0) == 60.
5555
end
5656

5757
# Issue #38 - Splatting arrays
58-
@test gradient(x -> max(x...), (1,2,3))[1] == (0.0, 0.0, 1.0)
59-
@test gradient(x -> max(x...), [1,2,3])[1] == [0.0, 0.0, 1.0]
58+
@test gradient(x -> max(x...), (1,2,3))[1] == (0.0, 0.0, 1.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
59+
@test gradient(x -> max(x...), [1,2,3])[1] == [0.0, 0.0, 1.0] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
6060

6161
# Issue #40 - Symbol type parameters not properly quoted
62-
@test Diffractor.∂⃖recurse{1}()(Val{:transformations})[1] === Val{:transformations}()
62+
@test Diffractor.∂⃖recurse{1}()(Val{:transformations})[1] === Val{:transformations}() broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
6363

6464
# PR #43
6565
loss(res, z, w) = sum(res.U * Diagonal(res.S) * res.V) + sum(res.S .* w)
6666
x43 = rand(10, 10)
67-
@test Diffractor.gradient(x->loss(svd(x), x[:,1], x[:,2]), x43) isa Tuple{Matrix{Float64}}
67+
@test Diffractor.gradient(x->loss(svd(x), x[:,1], x[:,2]), x43) isa Tuple{Matrix{Float64}} broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
6868

6969
# PR # 45 - Calling back into AD from ChainRules
70-
y45, back45 = rrule_via_ad(DiffractorRuleConfig(), x -> log(exp(x)), 2)
71-
@test y45 ≈ 2.0
72-
@test back45(1) == (ZeroTangent(), 1.0)
70+
@test_broken y45, back45 = rrule_via_ad(DiffractorRuleConfig(), x -> log(exp(x)), 2) # https://github.com/JuliaDiff/Diffractor.jl/issues/170
71+
@test_broken y45 ≈ 2.0 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
72+
@test_broken back45(1) == (ZeroTangent(), 1.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
7373

7474
z45, delta45 = frule_via_ad(DiffractorRuleConfig(), (0,1), x -> log(exp(x)), 2)
7575
@test z45 ≈ 2.0
7676
@test delta45 ≈ 1.0
7777

7878
# PR #82 - getindex on non-numeric arrays
79-
@test gradient(ls -> ls[1](1.), [Base.Fix1(*, 1.)])[1][1] isa Tangent{<:Base.Fix1}
79+
@test gradient(ls -> ls[1](1.), [Base.Fix1(*, 1.)])[1][1] isa Tangent{<:Base.Fix1} broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
8080

8181
@testset "broadcast" begin
82-
@test gradient(x -> sum(x ./ x), [1,2,3]) == ([0,0,0],) # derivatives_given_output
83-
@test gradient(x -> sum(sqrt.(atan.(x, transpose(x)))), [1,2,3])[1] ≈ [0.2338, -0.0177, -0.0661] atol=1e-3
84-
@test gradient(x -> sum(exp.(log.(x))), [1,2,3]) == ([1,1,1],)
82+
# derivatives_given_output
83+
@test gradient(x -> sum(x ./ x), [1,2,3]) == ([0,0,0],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
84+
@test gradient(x -> sum(sqrt.(atan.(x, transpose(x)))), [1,2,3])[1] ≈ [0.2338, -0.0177, -0.0661] atol=1e-3 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
85+
@test gradient(x -> sum(exp.(log.(x))), [1,2,3]) == ([1,1,1],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
8586

86-
@test gradient(x -> sum((exp∘log).(x)), [1,2,3]) == ([1,1,1],) # frule_via_ad
87+
# frule_via_ad
88+
@test gradient(x -> sum((exp∘log).(x)), [1,2,3]) == ([1,1,1],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
8789
exp_log(x) = exp(log(x))
88-
@test gradient(x -> sum(exp_log.(x)), [1,2,3]) == ([1,1,1],)
89-
@test gradient((x,y) -> sum(x ./ y), [1 2; 3 4], [1,2]) == ([1 1; 0.5 0.5], [-3, -1.75])
90-
@test gradient((x,y) -> sum(x ./ y), [1 2; 3 4], 5) == ([0.2 0.2; 0.2 0.2], -0.4)
91-
@test gradient(x -> sum((y -> y/x).([1,2,3])), 4) == (-0.375,) # closure
92-
93-
@test gradient(x -> sum(sum, (x,) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3 # array of arrays
94-
@test gradient(x -> sum(sum, Ref(x) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3
95-
@test gradient(x -> sum(sum, (x,) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3
96-
@test gradient(x -> sum(sum, (x,) .* transpose(x)), [1,2,3])[1] ≈ [12, 12, 12] # must not take the * fast path
97-
98-
@test gradient(x -> sum(x ./ 4), [1,2,3]) == ([0.25, 0.25, 0.25],)
99-
@test gradient(x -> sum([1,2,3] ./ x), 4) == (-0.375,) # x/y rule
100-
@test gradient(x -> sum(x.^2), [1,2,3]) == ([2.0, 4.0, 6.0],) # x.^2 rule
101-
@test gradient(x -> sum([1,2,3] ./ x.^2), 4) == (-0.1875,) # scalar^2 rule
102-
103-
@test gradient(x -> sum((1,2,3) .- x), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(-1.0, -1.0, -1.0),)
104-
@test gradient(x -> sum(transpose([1,2,3]) .- x), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(-3.0, -3.0, -3.0),)
105-
@test gradient(x -> sum([1 2 3] .+ x .^ 2), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(6.0, 12.0, 18.0),)
106-
107-
@test gradient(x -> sum(x .> 2), [1,2,3]) |> only |> iszero # Bool output
108-
@test gradient(x -> sum(1 .+ iseven.(x)), [1,2,3]) |> only |> iszero
109-
@test gradient((x,y) -> sum(x .== y), [1,2,3], [1 2 3]) == (NoTangent(), NoTangent())
110-
@test gradient(x -> sum(x .+ [1,2,3]), true) |> only |> iszero # Bool input
111-
@test gradient(x -> sum(x ./ [1,2,3]), [true false]) |> only |> iszero
112-
@test gradient(x -> sum(x .* transpose([1,2,3])), (true, false)) |> only |> iszero
113-
114-
tup_adj = gradient((x,y) -> sum(2 .* x .+ log.(y)), (1,2), transpose([3,4,5]))
115-
@test tup_adj[1] == Tangent{Tuple{Int64, Int64}}(6.0, 6.0)
116-
@test tup_adj[2] ≈ [0.6666666666666666 0.5 0.4]
117-
@test tup_adj[2] isa Transpose
118-
@test gradient(x -> sum(atan.(x, (1,2,3))), Diagonal([4,5,6]))[1] isa Diagonal
119-
120-
@test gradient(x -> sum((y -> (x*y)).([1,2,3])), 4.0) == (6.0,) # closure
90+
@test gradient(x -> sum(exp_log.(x)), [1,2,3]) == ([1,1,1],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
91+
@test gradient((x,y) -> sum(x ./ y), [1 2; 3 4], [1,2]) == ([1 1; 0.5 0.5], [-3, -1.75]) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
92+
@test gradient((x,y) -> sum(x ./ y), [1 2; 3 4], 5) == ([0.2 0.2; 0.2 0.2], -0.4) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
93+
# closure:
94+
@test gradient(x -> sum((y -> y/x).([1,2,3])), 4) == (-0.375,) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
95+
96+
# array of arrays
97+
@test gradient(x -> sum(sum, (x,) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
98+
@test gradient(x -> sum(sum, Ref(x) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
99+
@test gradient(x -> sum(sum, (x,) ./ x), [1,2,3])[1] ≈ [-4.1666, 0.3333, 1.1666] atol=1e-3 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
100+
# must not take fast path
101+
@test gradient(x -> sum(sum, (x,) .* transpose(x)), [1,2,3])[1] ≈ [12, 12, 12] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
102+
103+
@test gradient(x -> sum(x ./ 4), [1,2,3]) == ([0.25, 0.25, 0.25],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
104+
# x/y rule
105+
@test gradient(x -> sum([1,2,3] ./ x), 4) == (-0.375,) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
106+
# x.^2 rule
107+
@test gradient(x -> sum(x.^2), [1,2,3]) == ([2.0, 4.0, 6.0],) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
108+
# scalar^2 rule
109+
@test gradient(x -> sum([1,2,3] ./ x.^2), 4) == (-0.1875,) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
110+
111+
@test gradient(x -> sum((1,2,3) .- x), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(-1.0, -1.0, -1.0),) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
112+
@test gradient(x -> sum(transpose([1,2,3]) .- x), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(-3.0, -3.0, -3.0),) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
113+
@test gradient(x -> sum([1 2 3] .+ x .^ 2), (1,2,3)) == (Tangent{Tuple{Int,Int,Int}}(6.0, 12.0, 18.0),) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
114+
115+
# Bool output
116+
@test gradient(x -> sum(x .> 2), [1,2,3]) |> only |> iszero broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
117+
@test gradient(x -> sum(1 .+ iseven.(x)), [1,2,3]) |> only |> iszero broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
118+
@test gradient((x,y) -> sum(x .== y), [1,2,3], [1 2 3]) == (NoTangent(), NoTangent()) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
119+
# Bool input
120+
@test gradient(x -> sum(x .+ [1,2,3]), true) |> only |> iszero broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
121+
@test gradient(x -> sum(x ./ [1,2,3]), [true false]) |> only |> iszero broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
122+
@test gradient(x -> sum(x .* transpose([1,2,3])), (true, false)) |> only |> iszero broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
123+
124+
@test_broken tup_adj = gradient((x,y) -> sum(2 .* x .+ log.(y)), (1,2), transpose([3,4,5])) # https://github.com/JuliaDiff/Diffractor.jl/issues/170
125+
@test tup_adj[1] == Tangent{Tuple{Int64, Int64}}(6.0, 6.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
126+
@test tup_adj[2] ≈ [0.6666666666666666 0.5 0.4] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
127+
@test tup_adj[2] isa Transpose broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
128+
@test gradient(x -> sum(atan.(x, (1,2,3))), Diagonal([4,5,6]))[1] isa Diagonal broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
129+
130+
# closure:
131+
@test gradient(x -> sum((y -> (x*y)).([1,2,3])), 4.0) == (6.0,) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
121132
end
122133

123134
@testset "broadcast, 2nd order" begin
124-
@test gradient(x -> gradient(y -> sum(y .* y), x)[1] |> sum, [1,2,3.0])[1] == [2,2,2] # calls "split broadcasting generic" with f = unthunk
125-
@test gradient(x -> gradient(y -> sum(y .* x), x)[1].^3 |> sum, [1,2,3.0])[1] == [3,12,27]
126-
@test_broken gradient(x -> gradient(y -> sum(y .* 2 .* y'), x)[1] |> sum, [1,2,3.0])[1] == [12, 12, 12] # Control flow support not fully implemented yet for higher-order
127-
128-
@test_broken gradient(x -> sum(gradient(x -> sum(x .^ 2 .+ x'), x)[1]), [1,2,3.0])[1] == [6,6,6] # BoundsError: attempt to access 18-element Vector{Core.Compiler.BasicBlock} at index [0]
135+
# calls "split broadcasting generic" with f = unthunk
136+
@test gradient(x -> gradient(y -> sum(y .* y), x)[1] |> sum, [1,2,3.0])[1] == [2,2,2] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
137+
@test gradient(x -> gradient(y -> sum(y .* x), x)[1].^3 |> sum, [1,2,3.0])[1] == [3,12,27] broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
138+
# Control flow support not fully implemented yet for higher-order
139+
@test_broken gradient(x -> gradient(y -> sum(y .* 2 .* y'), x)[1] |> sum, [1,2,3.0])[1] == [12, 12, 12]
140+
141+
# BoundsError: attempt to access 18-element Vector{Core.Compiler.BasicBlock} at index [0]
142+
@test_broken gradient(x -> sum(gradient(x -> sum(x .^ 2 .+ x'), x)[1]), [1,2,3.0])[1] == [6,6,6]
129143
@test_broken gradient(x -> sum(gradient(x -> sum((x .+ 1) .* x .- x), x)[1]), [1,2,3.0])[1] == [2,2,2]
130144
@test_broken gradient(x -> sum(gradient(x -> sum(x .* x ./ 2), x)[1]), [1,2,3.0])[1] == [1,1,1]
131145

132-
@test_broken gradient(x -> sum(gradient(x -> sum(exp.(x)), x)[1]), [1,2,3])[1] ≈ exp.(1:3) # MethodError: no method matching copy(::Nothing)
146+
# MethodError: no method matching copy(::Nothing)
147+
@test_broken gradient(x -> sum(gradient(x -> sum(exp.(x)), x)[1]), [1,2,3])[1] ≈ exp.(1:3)
133148
@test_broken gradient(x -> sum(gradient(x -> sum(atan.(x, x')), x)[1]), [1,2,3.0])[1] ≈ [0,0,0]
134-
@test_broken gradient(x -> sum(gradient(x -> sum(transpose(x) .* x), x)[1]), [1,2,3]) == ([6,6,6],) # accum(a::Transpose{Float64, Vector{Float64}}, b::ChainRulesCore.Tangent{Transpose{Int64, Vector{Int64}}, NamedTuple{(:parent,), Tuple{ChainRulesCore.NoTangent}}})
149+
# accum(a::Transpose{Float64, Vector{Float64}}, b::ChainRulesCore.Tangent{Transpose{Int64, Vector{Int64}}, NamedTuple{(:parent,), Tuple{ChainRulesCore.NoTangent}}})
150+
@test_broken gradient(x -> sum(gradient(x -> sum(transpose(x) .* x), x)[1]), [1,2,3]) == ([6,6,6],)
135151
@test_broken gradient(x -> sum(gradient(x -> sum(transpose(x) ./ x.^2), x)[1]), [1,2,3])[1] ≈ [27.675925925925927, -0.824074074074074, -2.1018518518518516]
136152

137153
@test_broken gradient(z -> gradient(x -> sum((y -> (x^2*y)).([1,2,3])), z)[1], 5.0) == (12.0,)

test/reverse.jl

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -77,35 +77,35 @@ isa_control_flow(::Type{T}, x) where {T} = isa(x, T) ? x : T(x)
7777
let var"'" = Diffractor.PrimeDerivativeBack
7878
# Integration tests
7979
@test @inferred(sin'(1.0)) == cos(1.0)
80-
@test sin''(1.0) == -sin(1.0)
81-
@test sin'''(1.0) == -cos(1.0)
80+
@test sin''(1.0) == -sin(1.0) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
81+
@test_broken sin'''(1.0) == -cos(1.0)
8282
# TODO These currently cause segfaults c.f. https://github.com/JuliaLang/julia/pull/48742
8383
# @test sin''''(1.0) == sin(1.0)
8484
# @test sin'''''(1.0) == cos(1.0)
8585
# @test sin''''''(1.0) == -sin(1.0)
8686

8787
f_getfield(x) = getfield((x,), 1)
88-
@test f_getfield'(1) == 1
89-
@test f_getfield''(1) == 0
90-
@test f_getfield'''(1) == 0
88+
@test f_getfield'(1) == 1 broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
89+
@test_broken f_getfield''(1) == 0
90+
@test_broken f_getfield'''(1) == 0
9191

9292
# Higher order mixed mode tests
9393

9494
complicated_2sin(x) = (x = map(sin, Diffractor.xfill(x, 2)); x[1] + x[2])
95-
@test @inferred(complicated_2sin'(1.0)) == 2sin'(1.0)
96-
@test @inferred(complicated_2sin''(1.0)) == 2sin''(1.0) broken=true
97-
@test @inferred(complicated_2sin'''(1.0)) == 2sin'''(1.0) broken=true
95+
@test_broken @inferred(complicated_2sin'(1.0)) == 2sin'(1.0)
96+
@test_broken @inferred(complicated_2sin''(1.0)) == 2sin''(1.0) broken=true
97+
@test_broken @inferred(complicated_2sin'''(1.0)) == 2sin'''(1.0) broken=true
9898
# TODO This currently causes a segfault, c.f. https://github.com/JuliaLang/julia/pull/48742
9999
# @test @inferred(complicated_2sin''''(1.0)) == 2sin''''(1.0) broken=true
100100

101101
# Control flow cases
102-
@test @inferred((x->simple_control_flow(true, x))'(1.0)) == sin'(1.0)
103-
@test @inferred((x->simple_control_flow(false, x))'(1.0)) == cos'(1.0)
104-
@test (x->sum(isa_control_flow(Matrix{Float64}, x)))'(Float32[1 2;]) == [1.0 1.0;]
105-
@test times_three_while'(1.0) == 3.0
102+
@test_broken @inferred((x->simple_control_flow(true, x))'(1.0)) == sin'(1.0)
103+
@test_broken @inferred((x->simple_control_flow(false, x))'(1.0)) == cos'(1.0)
104+
@test_broken (x->sum(isa_control_flow(Matrix{Float64}, x)))'(Float32[1 2;]) == [1.0 1.0;]
105+
@test_broken times_three_while'(1.0) == 3.0
106106

107107
pow5p(x) = (x->mypow(x, 5))'(x)
108-
@test pow5p(1.0) == 5.0
108+
@test_broken pow5p(1.0) == 5.0
109109
end
110110

111111

test/runtests.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ end
3939
my_tuple(args...) = args
4040
ChainRules.rrule(::typeof(my_tuple), args...) = args, Δ->Core.tuple(NoTangent(), Δ...)
4141

42-
@test tup2(my_tuple) == (ZeroTangent(), 4)
42+
@test tup2(my_tuple) == (ZeroTangent(), 4) broken=true # https://github.com/JuliaDiff/Diffractor.jl/issues/170
4343

4444

4545

0 commit comments

Comments
 (0)