|
| 1 | + |
1 | 2 | using Test |
2 | 3 | using Optimization |
3 | | -using Optimization.SciMLBase # for NoAD |
| 4 | +using Optimization.SciMLBase |
| 5 | +using Optimization.ADTypes |
4 | 6 | using OptimizationODE |
5 | 7 |
|
# Quadratic test objective: sum of squared components plus an extra
# p[1]-weighted u[2]^2 term. For p[1] >= 0 it is minimized at u == 0.
function quad(u, p)
    total = sum(abs2, u)
    return total + p[1] * abs2(u[2])
end
9 | 11 |
|
"""
    quad_grad!(g, u, p, data)

In-place analytic gradient of `quad`. Since
`quad(u, p) = sum(u .^ 2) + p[1]*u[2]^2`, the gradient is `2u[i]` in every
component, with an additional `2p[1]*u[2]` in component 2.

The previous version set `g[2] = 2p[1]*u[2]` only — the gradient of the old
objective `u[1]^2 + p[1]*u[2]^2` — and dropped the `2u[2]` contribution from
the `sum(u .^ 2)` term. `data` is unused; the extra argument matches the
callback signature expected by the solver.
"""
function quad_grad!(g, u, p, data)
    @. g = 2u
    g[2] += 2p[1] * u[2]
    return g
end
13 | 17 |
|
# Classic Rosenbrock function with parameters p = (a, b); its global
# minimum of 0 lies at u = (a, a^2).
function rosenbrock(u, p)
    a, b = p[1], p[2]
    residual = u[2] - u[1]^2
    return (a - u[1])^2 + b * residual^2
end
20 | 21 |
|
# In-place analytic gradient of `rosenbrock`; `data` is accepted but
# unused (the solver's gradient callback passes it).
function rosenbrock_grad!(g, u, p, data)
    a, b = p[1], p[2]
    inner = u[2] - u[1]^2
    g[1] = -2 * (a - u[1]) - 4 * b * u[1] * inner
    g[2] = 2 * b * inner
    return g
end
21 | 27 |
|
@testset "OptimizationODE Solvers" begin
    # Quadratic problem with a hand-written gradient (NoAD backend);
    # shared by the four solver smoke tests below.
    u0q = [2.0, -3.0]
    pq = [5.0]
    fq = OptimizationFunction(quad, SciMLBase.NoAD(); grad = quad_grad!)
    probQ = OptimizationProblem(fq, u0q, pq)

    # Each solver should drive the quadratic's iterate to the known
    # minimizer u == 0 within the given step count.
    @testset "ODEGradientDescent on Quadratic" begin
        sol = solve(probQ, ODEGradientDescent; dt = 0.1, maxiters = 1000)
        @test isapprox(sol.u, zeros(length(sol.u)); atol=1e-2)
    end

    @testset "RKChebyshevDescent on Quadratic" begin
        sol = solve(probQ, RKChebyshevDescent; dt = 0.1, maxiters = 1000)
        @test isapprox(sol.u, zeros(length(sol.u)); atol=1e-2)
    end

    @testset "RKAccelerated on Quadratic" begin
        sol = solve(probQ, RKAccelerated; dt = 0.1, maxiters = 1000)
        @test isapprox(sol.u, zeros(length(sol.u)); atol=1e-2)
    end

    @testset "PRKChebyshevDescent on Quadratic" begin
        sol = solve(probQ, PRKChebyshevDescent; dt = 0.1, maxiters = 1000)
        @test isapprox(sol.u, zeros(length(sol.u)); atol=1e-2)
    end

    # Rosenbrock with the standard starting point (-1.2, 1.0) and
    # parameters (a, b) = (1, 100); the minimum is at (1, 1).
    u0r = [-1.2, 1.0]
    pr = [1.0, 100.0]

    # Run once with the explicit hand-written gradient (NoAD) and once
    # with forward-mode AD. NOTE(review): `grad = rosenbrock_grad!` is
    # also passed in the AutoForwardDiff case — presumably the AD
    # backend takes precedence there; confirm against Optimization.jl.
    for (mode, desc) in [(SciMLBase.NoAD(), "NoAD"),
                         (ADTypes.AutoForwardDiff(), "AutoForwardDiff")]
        @testset "ODEGradientDescent on Rosenbrock ($desc)" begin
            fr = OptimizationFunction(rosenbrock, mode; grad = rosenbrock_grad!)
            probR = OptimizationProblem(fr, u0r, pr)
            # Smaller dt than the quadratic tests: Rosenbrock's narrow
            # curved valley needs a finer step to stay stable.
            sol = solve(probR, ODEGradientDescent; dt = 0.001, maxiters = 5000)
            # Looser tolerance on u[2]: convergence along the valley
            # floor is slower than toward it.
            @test isapprox(sol.u[1], 1.0; atol=1e-1)
            @test isapprox(sol.u[2], 1.0; atol=2e-1)
        end
    end
end
0 commit comments