## Simple 2D example from [Kelley2003](@cite)

using NewtonKrylov, LinearAlgebra
using CairoMakie
using Krylov, Enzyme

"""
    F!(res, x)

Evaluate the 2D nonlinear test system from Kelley (2003) in place:

    res[1] = x[1]^2 + x[2]^2 - 2
    res[2] = exp(x[1] - 1) + x[2]^2 - 2

The root `x = (1, 1)` satisfies both equations. Returns `res`, following the
Julia convention that mutating functions return their mutated argument
(the original returned the value of the last assignment by accident).
"""
function F!(res, x)
    res[1] = x[1]^2 + x[2]^2 - 2
    res[2] = exp(x[1] - 1) + x[2]^2 - 2
    return res
end
| 11 | + |
"""
    F(x)

Out-of-place wrapper around `F!`: allocate a fresh residual vector of the
same type/shape as `x`, fill it via `F!`, and return it.
"""
function F(x)
    out = similar(x)
    F!(out, x)
    return out
end
| 17 | + |
"""
    halley(F!, u, res; tol_rel = 1.0e-6, tol_abs = 1.0e-12,
           max_niter = 50, Solver = GmresSolver)

Solve `F(u) = 0` with Halley's method using Jacobian-free Krylov solves.

Each iteration solves `J a = F(u)`, forms the second-order directional
derivative `hvvp = H[a, a]` by forward-mode AD through the JVP operator,
solves `J b = hvvp`, and applies the componentwise Halley update
`u -= a^2 / (a - b/2)`.

# Arguments
- `F!(res, u)`: in-place residual function.
- `u`: initial guess; updated in place.
- `res`: residual buffer; overwritten.

# Keywords
- `tol_rel`, `tol_abs`: stopping tolerance `tol = tol_rel * ‖F(u₀)‖ + tol_abs`.
- `max_niter`: maximum number of Halley iterations.
- `Solver`: Krylov.jl workspace type used for both linear solves.

Returns the (mutated) iterate `u`.
"""
function halley(F!, u, res;
        tol_rel = 1.0e-6,
        tol_abs = 1.0e-12,
        max_niter = 50,
        Solver = GmresSolver,
    )

    F!(res, u) # res = F(u)
    n_res = norm(res)

    tol = tol_rel * n_res + tol_abs

    J = NewtonKrylov.JacobianOperator(F!, res, u)
    H = NewtonKrylov.HessianOperator(J)
    solver = Solver(J, res)

    # Buffer for the Hessian-vector-vector product; hoisted out of the loop
    # so it is allocated once instead of once per iteration.
    hvvp = similar(res)

    # BUG FIX: the original iterated `for i in :max_niter`, i.e. over a
    # Symbol, which raises a MethodError; the intent is `1:max_niter`.
    for _ in 1:max_niter
        if n_res <= tol
            break
        end
        solve!(solver, J, copy(res)) # a = J \ F(u)
        a = copy(solver.x)

        # 2nd-order directional derivative (HVVP) via the JVP operator
        mul!(hvvp, H, a)

        solve!(solver, J, hvvp) # b = J \ hvvp
        b = solver.x

        # componentwise Halley update
        @. u -= (a * a) / (a - b / 2)

        # BUG FIX: re-evaluate the residual after the update; the original
        # never refreshed `n_res`, so the convergence check could never
        # observe progress and the loop always ran to max_niter.
        F!(res, u)
        n_res = norm(res)
    end
    return u
end
| 53 | + |
| 54 | +# u = [2.0, 0.5] |
| 55 | +# res = zeros(2) |
# J = NewtonKrylov.JacobianOperator(F!, res, u)
| 57 | +# F!(res, u) |
| 58 | +# a, stats = gmres(J, copy(res)) |
| 59 | + |
| 60 | +# J_cache = Enzyme.make_zero(J) |
| 61 | +# out = similar(J.res) |
| 62 | +# hvvp = Enzyme.make_zero(out) |
| 63 | +# du = Enzyme.make_zero(J.u) |
| 64 | +# autodiff(Forward, LinearAlgebra.mul!, |
| 65 | +# DuplicatedNoNeed(out, hvvp), |
| 66 | +# DuplicatedNoNeed(J, J_cache), |
| 67 | +# DuplicatedNoNeed(du, a)) |
| 68 | + |
| 69 | +# hvvp |
| 70 | + |
| 71 | +# b, stats = gmres(J, hvvp) |
| 72 | +# @. u -= (a * a) / (a - b / 2) |
| 73 | + |
| 74 | +# a |
| 75 | + |
| 76 | + |
# First forward-mode directional derivative of `flux` along `dx`.
# NOTE(review): `flux` is not defined anywhere in this file — presumably it
# should be `F` (or is provided by an enclosing session); confirm before use.
dg_ad(x, dx) = autodiff(Forward, flux, DuplicatedNoNeed(x, dx))[1]
# Second directional derivative: forward-differentiate `dg_ad` itself,
# seeding the primal `x` with `dx` and the direction `dx` with `ddx`.
ddg_ad(x, dx, ddx) = autodiff(Forward, dg_ad, DuplicatedNoNeed(x, dx),
    DuplicatedNoNeed(dx, ddx))[1]
| 80 | + |
# Evaluation grid for the contour plot of ‖F(x, y)‖.
xs = LinRange(-3, 8, 1000)
ys = LinRange(-15, 10, 1000)

# Contour levels: dense near the roots, progressively coarser further out.
levels = vcat([0.1, 0.25], collect(0.5:2:10), collect(10.0:10:200), collect(200:100:4000))

fig, ax = contour(xs, ys, (x, y) -> norm(F([x, y])); levels)
| 87 | + |
# Newton–Krylov from x₀ = (2.0, 0.5); the callback records every iterate.
trace_1 = let x₀ = [2.0, 0.5]
    path = Tuple{Float64, Float64}[]
    record(x, res, n_res) = (push!(path, (x[1], x[2])); nothing)
    newton_krylov!(F!, x₀, callback = record)
    path
end
lines!(ax, trace_1)
| 95 | + |
# Same solve from a second starting point, x₀ = (2.5, 3.0).
trace_2 = let x₀ = [2.5, 3.0]
    path = Tuple{Float64, Float64}[]
    record(x, res, n_res) = (push!(path, (x[1], x[2])); nothing)
    newton_krylov!(F!, x₀, callback = record)
    path
end
lines!(ax, trace_2)
| 103 | + |
# Third start, x₀ = (3.0, 4.0), with Eisenstat–Walker forcing and verbose
# output; report whether the solve converged.
trace_3 = let x₀ = [3.0, 4.0]
    path = Tuple{Float64, Float64}[]
    record(x, res, n_res) = (push!(path, (x[1], x[2])); nothing)
    _, stats = newton_krylov!(F!, x₀, callback = record,
        forcing = NewtonKrylov.EisenstatWalker(η_max = 0.68949), verbose = 1)
    @show stats.solved
    path
end
lines!(ax, trace_3)

fig