Commit dcaf898

Summary plots on Brusselator sparse AD benchmarks

1 parent: 0010d94
3 files changed: +926 additions, -77 deletions

Lines changed in the file below: 186 additions & 71 deletions
@@ -1,5 +1,5 @@
 ---
-title: Sparse AD benchmarks
+title: Brusselator sparse AD benchmarks
 author: Guillaume Dalle
 ---
 
@@ -8,6 +8,7 @@ using ADTypes
 using LinearAlgebra, SparseArrays
 using BenchmarkTools, DataFrames
 import DifferentiationInterface as DI
+using Plots
 import SparseDiffTools as SDT
 using SparseConnectivityTracer: TracerSparsityDetector
 using SparseMatrixColorings: GreedyColoringAlgorithm
@@ -18,11 +19,8 @@ using Test
 ## Definitions
 
 ```julia
-const N = 32
-const xyd_brusselator = range(0; stop=1, length=N)
-const p = (3.4, 1.0, 10.0, step(xyd_brusselator))
-
 brusselator_f(x, y, t) = (((x - 0.3)^2 + (y - 0.6)^2) <= 0.1^2) * (t >= 1.1) * 5.0
+
 limit(a, N) =
     if a == N + 1
         1
@@ -31,15 +29,18 @@ limit(a, N) =
     else
         a
     end;
-```
 
-```julia
-function brusselator_2d_loop(du, u, p, t)
+function brusselator_2d!(du, u)
+    t = 0.0
+    N = size(u, 1)
+    xyd = range(0; stop=1, length=N)
+    p = (3.4, 1.0, 10.0, step(xyd))
     A, B, alpha, dx = p
     alpha = alpha / dx^2
+
     @inbounds for I in CartesianIndices((N, N))
         i, j = Tuple(I)
-        x, y = xyd_brusselator[I[1]], xyd_brusselator[I[2]]
+        x, y = xyd[I[1]], xyd[I[2]]
         ip1, im1, jp1, jm1 = limit(i + 1, N),
         limit(i - 1, N), limit(j + 1, N),
         limit(j - 1, N)
@@ -54,10 +55,9 @@ function brusselator_2d_loop(du, u, p, t)
             A * u[i, j, 1] - u[i, j, 1]^2 * u[i, j, 2]
     end
 end;
-```
 
-```julia
-function init_brusselator_2d(xyd)
+function init_brusselator_2d(N::Integer)
+    xyd = range(0; stop=1, length=N)
     N = length(xyd)
     u = zeros(N, N, 2)
     for I in CartesianIndices((N, N))
@@ -70,100 +70,215 @@ function init_brusselator_2d(xyd)
 end;
 ```
 
-```julia
-x0 = init_brusselator_2d(xyd_brusselator);
-y0 = similar(x0);
-
-f!(y, x) = brusselator_2d_loop(y, x, p, 0.0);
-```
-
-## Sparsity detection
+## Correctness
 
 ```julia
-S1 = ADTypes.jacobian_sparsity(f!, y0, x0, TracerSparsityDetector())
-S2 = ADTypes.jacobian_sparsity(f!, y0, x0, SymbolicsSparsityDetector())
-@test S1 == S2
+x0_32 = init_brusselator_2d(32);
 ```
 
-```julia
-td1 = @belapsed ADTypes.jacobian_sparsity($f!, $y0, $x0, TracerSparsityDetector())
-println("Sparsity detection with SparseConnectivityTracer: $td1 s")
-```
+### Sparsity detection
 
 ```julia
-td2 = @belapsed ADTypes.jacobian_sparsity($f!, $y0, $x0, SymbolicsSparsityDetector())
-println("Sparsity detection with Symbolics: $td2 s")
-```
-
-```julia
-println("Speedup from new sparsity detection method (>1 is better): $(td2 / td1)")
+S1 = ADTypes.jacobian_sparsity(
+    brusselator_2d!, similar(x0_32), x0_32, TracerSparsityDetector()
+)
+S2 = ADTypes.jacobian_sparsity(
+    brusselator_2d!, similar(x0_32), x0_32, SymbolicsSparsityDetector()
+)
+@test S1 == S2
 ```
 
-## Coloring
+### Coloring
 
 ```julia
-S = S1
-c1 = ADTypes.column_coloring(S, GreedyColoringAlgorithm())
-c2 = SDT.matrix_colors(S)
+c1 = ADTypes.column_coloring(S1, GreedyColoringAlgorithm())
+c2 = SDT.matrix_colors(S1)
 @test c1 == c2
 ```
 
-```julia
-tc1 = @belapsed ADTypes.column_coloring($S, GreedyColoringAlgorithm())
-println("Coloring with SparseMatrixColorings: $tc1 s")
-```
-
-```julia
-tc2 = @belapsed SDT.matrix_colors($S)
-println("Coloring with SDT: $tc2 s")
-```
-
-```julia
-println("Speedup from new coloring method (>1 is better): $(tc2 / tc1)")
-```
-
-## Compressed differentiation
+### Differentiation
 
 ```julia
 backend = AutoSparse(
     AutoForwardDiff();
     sparsity_detector=TracerSparsityDetector(),
     coloring_algorithm=GreedyColoringAlgorithm(),
 );
-```
 
-```julia
-extras = DI.prepare_jacobian(f!, similar(y0), backend, x0);
-J1 = DI.jacobian!(f!, similar(y0), similar(S, eltype(x0)), backend, x0, extras)
+extras = DI.prepare_jacobian(brusselator_2d!, similar(x0_32), backend, x0_32);
+J1 = DI.jacobian!(
+    brusselator_2d!, similar(x0_32), similar(S1, eltype(x0_32)), backend, x0_32, extras
+)
 
 cache = SDT.sparse_jacobian_cache(
-    backend, SDT.JacPrototypeSparsityDetection(; jac_prototype=S), f!, similar(y0), x0
+    backend,
+    SDT.JacPrototypeSparsityDetection(; jac_prototype=S1),
+    brusselator_2d!,
+    similar(x0_32),
+    x0_32,
 );
-J2 = SDT.sparse_jacobian!(similar(S, eltype(x0)), backend, cache, f!, similar(y0), x0)
+J2 = SDT.sparse_jacobian!(
+    similar(S1, eltype(x0_32)), backend, cache, brusselator_2d!, similar(x0_32), x0_32
+)
 
 @test J1 == J2
 ```
 
+## Benchmarks
+
+```julia
+N_values = 2 .^ (2:8)
+```
+
+### Sparsity detection
+
+```julia
+td1, td2 = zeros(length(N_values)), zeros(length(N_values))
+for (i, N) in enumerate(N_values)
+    @info "Benchmarking sparsity detection: N=$N"
+    x0 = init_brusselator_2d(N)
+    td1[i] = @belapsed ADTypes.jacobian_sparsity(
+        $brusselator_2d!, $(similar(x0)), $x0, TracerSparsityDetector()
+    )
+    td2[i] = @belapsed ADTypes.jacobian_sparsity(
+        $brusselator_2d!, $(similar(x0)), $x0, SymbolicsSparsityDetector()
+    )
+end
+
+pld = plot(;
+    title="Sparsity detection on the Brusselator",
+    xlabel="Input size N",
+    ylabel="Runtime [s]",
+)
+plot!(
+    pld,
+    N_values,
+    td1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="SparseConnectivityTracer",
+)
+plot!(pld, N_values, td2; lw=2, linestyle=:auto, markershape=:auto, label="Symbolics")
+plot!(pld; xscale=:log10, yscale=:log10, legend=:topleft, minorgrid=true)
+pld
+```
+
+### Coloring
+
 ```julia
-tj1 = @belapsed DI.jacobian!($f!, _y, _J, $backend, $x0, _extras) evals = 1 samples = 100 setup = (
-    _y = similar(y0);
-    _J = similar(S, eltype(x0));
-    _extras = DI.prepare_jacobian(f!, similar(y0), backend, x0)
+tc1, tc2 = zeros(length(N_values)), zeros(length(N_values))
+for (i, N) in enumerate(N_values)
+    @info "Benchmarking coloring: N=$N"
+    x0 = init_brusselator_2d(N)
+    S = ADTypes.jacobian_sparsity(
+        brusselator_2d!, similar(x0), x0, TracerSparsityDetector()
+    )
+    tc1[i] = @belapsed ADTypes.column_coloring($S, GreedyColoringAlgorithm())
+    tc2[i] = @belapsed SDT.matrix_colors($S)
+end
+
+plc = plot(;
+    title="Coloring on the Brusselator", xlabel="Input size N", ylabel="Runtime [s]"
+)
+plot!(
+    plc,
+    N_values,
+    tc1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="SparseMatrixColorings",
 )
-println("Jacobian with DifferentiationInterface: $tj1 s")
+plot!(plc, N_values, tc2; lw=2, linestyle=:auto, markershape=:auto, label="SparseDiffTools")
+plot!(plc; xscale=:log10, yscale=:log10, legend=:topleft, minorgrid=true)
+plc
 ```
 
+### Differentiation
+
 ```julia
-tj2 = @belapsed SDT.sparse_jacobian!(_J, $backend, _cache, $f!, _y, x0) evals = 1 samples = 100 setup = (
-    _y = similar(y0);
-    _J = similar(S, eltype(x0));
-    _cache = SDT.sparse_jacobian_cache(
-        backend, SDT.JacPrototypeSparsityDetection(; jac_prototype=S), f!, similar(y0), x0
+tj1, tj2 = zeros(length(N_values)), zeros(length(N_values))
+for (i, N) in enumerate(N_values)
+    @info "Benchmarking differentiation: N=$N"
+    x0 = init_brusselator_2d(N)
+    S = ADTypes.jacobian_sparsity(
+        brusselator_2d!, similar(x0), x0, TracerSparsityDetector()
     )
+    J = similar(S, eltype(x0))
+
+    tj1[i] = @belapsed DI.jacobian!($brusselator_2d!, _y, _J, $backend, $x0, _extras) setup = (
+        _y = similar($x0);
+        _J = similar($J);
+        _extras = DI.prepare_jacobian($brusselator_2d!, similar($x0), $backend, $x0)
+    ) evals = 1
+
+    tj2[i] = @belapsed SDT.sparse_jacobian!(_J, $backend, _cache, $brusselator_2d!, _y, $x0) setup = (
+        _y = similar($x0);
+        _J = similar($J);
+        _cache = SDT.sparse_jacobian_cache(
+            $backend,
+            SDT.JacPrototypeSparsityDetection(; jac_prototype=$S),
+            $brusselator_2d!,
+            similar($x0),
+            $x0,
+        )
+    ) evals = 1
+end
+
+plj = plot(;
+    title="Sparse Jacobian on the Brusselator", xlabel="Input size N", ylabel="Runtime [s]"
 )
-println("Jacobian with SparseDiffTools: $tj2 s")
+plot!(
+    plj,
+    N_values,
+    tj1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="DifferentiationInterface",
+)
+plot!(plj, N_values, tj2; lw=2, linestyle=:auto, markershape=:auto, label="SparseDiffTools")
+plot!(plj; xscale=:log10, yscale=:log10, legend=:topleft, minorgrid=true)
+plj
 ```
 
+### Summary
+
 ```julia
-println("Speedup from new differentiation method (>1 is better): $(tj2 / tj1)")
-```
+pl = plot(;
+    title="Is the new pipeline worth it?\nTest case: Brusselator",
+    xlabel="Input size N",
+    ylabel="Runtime ratio DI / SparseDiffTools",
+)
+plot!(
+    pl,
+    N_values,
+    td2 ./ td1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="sparsity detection speedup",
+)
+plot!(
+    pl,
+    N_values,
+    tc2 ./ tc1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="coloring speedup",
+)
+plot!(
+    pl,
+    N_values,
+    tj2 ./ tj1;
+    lw=2,
+    linestyle=:auto,
+    markershape=:auto,
+    label="differentiation speedup",
+)
+plot!(pl, N_values, ones(length(N_values)); lw=3, color=:black, label="same speed")
+plot!(pl; xscale=:log10, yscale=:log10, minorgrid=true, legend=:right)
+pl
+```
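For readers skimming the commit, here is a condensed, standalone sketch of the comparison that the updated file benchmarks, run for a single grid size. It is not part of the committed file: it only reuses calls that already appear in the diff above, assumes `brusselator_2d!` and `init_brusselator_2d` from the committed file are defined, and assumes the package versions current at the time of the commit (in particular the `extras`-based `DI.prepare_jacobian` / `DI.jacobian!` signatures used in the diff).

```julia
# Condensed sketch (illustration only): one DI-vs-SparseDiffTools comparison at a
# single grid size, using the same calls as the committed benchmark document.
using ADTypes, BenchmarkTools, ForwardDiff, Test
import DifferentiationInterface as DI
import SparseDiffTools as SDT
using SparseConnectivityTracer: TracerSparsityDetector
using SparseMatrixColorings: GreedyColoringAlgorithm

N = 32
x0 = init_brusselator_2d(N)  # defined in the diff above

# Sparsity pattern detected once, then reused by both pipelines.
S = ADTypes.jacobian_sparsity(brusselator_2d!, similar(x0), x0, TracerSparsityDetector())

backend = AutoSparse(
    AutoForwardDiff();
    sparsity_detector=TracerSparsityDetector(),
    coloring_algorithm=GreedyColoringAlgorithm(),
)

# Sparse Jacobian through DifferentiationInterface (preparation done once).
extras = DI.prepare_jacobian(brusselator_2d!, similar(x0), backend, x0)
J1 = DI.jacobian!(brusselator_2d!, similar(x0), similar(S, eltype(x0)), backend, x0, extras)

# Sparse Jacobian through SparseDiffTools, fed the same sparsity pattern.
cache = SDT.sparse_jacobian_cache(
    backend, SDT.JacPrototypeSparsityDetection(; jac_prototype=S), brusselator_2d!, similar(x0), x0
)
J2 = SDT.sparse_jacobian!(similar(S, eltype(x0)), backend, cache, brusselator_2d!, similar(x0), x0)

@test J1 == J2

# One point of the "differentiation speedup" curve from the summary plot.
t_di = @belapsed DI.jacobian!($brusselator_2d!, _y, _J, $backend, $x0, $extras) setup = (
    _y = similar($x0); _J = similar($S, eltype($x0))
) evals = 1
t_sdt = @belapsed SDT.sparse_jacobian!(_J, $backend, $cache, $brusselator_2d!, _y, $x0) setup = (
    _y = similar($x0); _J = similar($S, eltype($x0))
) evals = 1
println("Runtime ratio SparseDiffTools / DI (>1 favors DI): $(t_sdt / t_di)")
```

The committed benchmark loops repeat this for `N_values = 2 .^ (2:8)` and plot the runtime ratios for sparsity detection, coloring, and differentiation in the summary figure.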
