Commit 22de279 merges pull request #989 from SciML/gd/sparsead ("Add sparse AD comparison", parents 2dd3f58 and dd915a0). The new 169-line benchmark file added by this commit follows.

---
title: Sparse AD benchmarks
author: Guillaume Dalle
---

```julia
using ADTypes
using LinearAlgebra, SparseArrays
using BenchmarkTools, DataFrames
import DifferentiationInterface as DI
import SparseDiffTools as SDT
using ForwardDiff: ForwardDiff  # loaded explicitly: the AutoForwardDiff backend below relies on it
using SparseConnectivityTracer: TracerSparsityDetector
using SparseMatrixColorings: GreedyColoringAlgorithm
using Symbolics: SymbolicsSparsityDetector
using Test
```

## Definitions

```julia
const N = 32
const xyd_brusselator = range(0; stop=1, length=N)
const p = (3.4, 1.0, 10.0, step(xyd_brusselator))

brusselator_f(x, y, t) = (((x - 0.3)^2 + (y - 0.6)^2) <= 0.1^2) * (t >= 1.1) * 5.0
limit(a, N) =
    if a == N + 1
        1
    elseif a == 0
        N
    else
        a
    end;
```

```julia
function brusselator_2d_loop(du, u, p, t)
    A, B, alpha, dx = p
    alpha = alpha / dx^2
    @inbounds for I in CartesianIndices((N, N))
        i, j = Tuple(I)
        x, y = xyd_brusselator[I[1]], xyd_brusselator[I[2]]
        ip1, im1, jp1, jm1 = limit(i + 1, N),
        limit(i - 1, N), limit(j + 1, N),
        limit(j - 1, N)
        du[i, j, 1] =
            alpha *
            (u[im1, j, 1] + u[ip1, j, 1] + u[i, jp1, 1] + u[i, jm1, 1] - 4u[i, j, 1]) +
            B +
            u[i, j, 1]^2 * u[i, j, 2] - (A + 1) * u[i, j, 1] + brusselator_f(x, y, t)
        du[i, j, 2] =
            alpha *
            (u[im1, j, 2] + u[ip1, j, 2] + u[i, jp1, 2] + u[i, jm1, 2] - 4u[i, j, 2]) +
            A * u[i, j, 1] - u[i, j, 1]^2 * u[i, j, 2]
    end
end;
```

```julia
function init_brusselator_2d(xyd)
    N = length(xyd)
    u = zeros(N, N, 2)
    for I in CartesianIndices((N, N))
        x = xyd[I[1]]
        y = xyd[I[2]]
        u[I, 1] = 22 * (y * (1 - y))^(3 / 2)
        u[I, 2] = 27 * (x * (1 - x))^(3 / 2)
    end
    return u
end;
```

```julia
x0 = init_brusselator_2d(xyd_brusselator);
y0 = similar(x0);

f!(y, x) = brusselator_2d_loop(y, x, p, 0.0);
```

## Sparsity detection

```julia
S1 = ADTypes.jacobian_sparsity(f!, y0, x0, TracerSparsityDetector())
S2 = ADTypes.jacobian_sparsity(f!, y0, x0, SymbolicsSparsityDetector())
@test S1 == S2
```
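
To get a feel for the detected pattern, here is a small illustrative check that was not part of the original benchmark; it assumes the detector returns the pattern as a `SparseMatrixCSC`, so `nnz` from SparseArrays applies.

```julia
# Illustrative inspection of the detected pattern (assumes a SparseMatrixCSC result).
@show size(S1)
@show nnz(S1)              # number of structurally nonzero entries
@show nnz(S1) / length(S1) # density: the Brusselator Jacobian is very sparse
```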
87+
88+
```julia
89+
td1 = @belapsed ADTypes.jacobian_sparsity($f!, $y0, $x0, TracerSparsityDetector())
90+
println("Sparsity detection with SparseConnectivityTracer: $td1 s")
91+
```
92+
93+
```julia
94+
td2 = @belapsed ADTypes.jacobian_sparsity($f!, $y0, $x0, SymbolicsSparsityDetector())
95+
println("Sparsity detection with Symbolics: $td2 s")
96+
```
97+
98+
```julia
99+
println("Speedup from new sparsity detection method (>1 is better): $(td2 / td1)")
100+
```
101+
102+
## Coloring
103+
104+
```julia
105+
S = S1
106+
c1 = ADTypes.column_coloring(S, GreedyColoringAlgorithm())
107+
c2 = SDT.matrix_colors(S)
108+
@test c1 == c2
109+
```
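
The number of distinct colors is what the compressed differentiation step below pays for: one forward-mode sweep per color instead of one per column. A minimal sketch to report it, assuming the greedy coloring numbers colors consecutively from 1:

```julia
# Each color corresponds to one compressed column, i.e. one forward-mode sweep.
println("Number of colors: $(maximum(c1)) (vs $(size(S, 2)) columns in the full Jacobian)")
```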

```julia
tc1 = @belapsed ADTypes.column_coloring($S, GreedyColoringAlgorithm())
println("Coloring with SparseMatrixColorings: $tc1 s")
```

```julia
tc2 = @belapsed SDT.matrix_colors($S)
println("Coloring with SparseDiffTools: $tc2 s")
```

```julia
println("Speedup from new coloring method (>1 is better): $(tc2 / tc1)")
```

## Compressed differentiation

```julia
backend = AutoSparse(
    AutoForwardDiff();
    sparsity_detector=TracerSparsityDetector(),
    coloring_algorithm=GreedyColoringAlgorithm(),
);
```

```julia
extras = DI.prepare_jacobian(f!, similar(y0), backend, x0);
J1 = DI.jacobian!(f!, similar(y0), similar(S, eltype(x0)), backend, x0, extras)

cache = SDT.sparse_jacobian_cache(
    backend, SDT.JacPrototypeSparsityDetection(; jac_prototype=S), f!, similar(y0), x0
);
J2 = SDT.sparse_jacobian!(similar(S, eltype(x0)), backend, cache, f!, similar(y0), x0)

@test J1 == J2
```
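
As an extra correctness check that was not part of the original benchmark, the compressed sparse Jacobian can be compared against a dense reference computed directly with ForwardDiff. This sketch assumes ForwardDiff's mutating `jacobian(f!, y, x)` method accepts the 3-dimensional arrays used here and orders entries column-major, matching the sparsity pattern above.

```julia
# Dense reference Jacobian, no sparsity exploited (illustrative check only).
J_dense = ForwardDiff.jacobian(f!, similar(y0), x0)
@test J1 ≈ J_dense
```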

```julia
tj1 = @belapsed DI.jacobian!($f!, _y, _J, $backend, $x0, _extras) evals = 1 samples = 100 setup = (
    _y = similar(y0);
    _J = similar(S, eltype(x0));
    _extras = DI.prepare_jacobian(f!, similar(y0), backend, x0)
)
println("Jacobian with DifferentiationInterface: $tj1 s")
```

```julia
tj2 = @belapsed SDT.sparse_jacobian!(_J, $backend, _cache, $f!, _y, $x0) evals = 1 samples = 100 setup = (
    _y = similar(y0);
    _J = similar(S, eltype(x0));
    _cache = SDT.sparse_jacobian_cache(
        backend, SDT.JacPrototypeSparsityDetection(; jac_prototype=S), f!, similar(y0), x0
    )
)
println("Jacobian with SparseDiffTools: $tj2 s")
```

```julia
println("Speedup from new differentiation method (>1 is better): $(tj2 / tj1)")
```
