using Optimization, OptimizationIpopt
using Zygote
using SciMLBase
using Test
using LinearAlgebra

@testset "Additional Ipopt Examples" begin

    # @testset "Recursive NLP Example" begin
    #     # Based on recursive_nlp example from Ipopt
    #     # Inner problem: minimize exp[0.5 * (x - a)^2 + 0.5 * x^2] w.r.t. x
    #     # Outer problem: minimize exp[0.5 * (y(a) - a)^2 + 0.5 * y(a)^2] w.r.t. a
    #
    #     function inner_objective(x, a)
    #         arg = 0.5 * (x[1] - a)^2 + 0.5 * x[1]^2
    #         return exp(arg)
    #     end
    #
    #     function outer_objective(a, p)
    #         # Solve inner problem
    #         inner_optfunc = OptimizationFunction((x, _) -> inner_objective(x, a[1]), Optimization.AutoZygote())
    #         inner_prob = OptimizationProblem(inner_optfunc, [2.0], nothing)
    #         inner_sol = solve(inner_prob, IpoptOptimizer())
    #
    #         y = inner_sol.u[1]
    #         arg = 0.5 * (y - a[1])^2 + 0.5 * y^2
    #         return exp(arg)
    #     end
    #
    #     optfunc = OptimizationFunction(outer_objective, Optimization.AutoZygote())
    #     prob = OptimizationProblem(optfunc, [2.0], nothing)
    #     sol = solve(prob, IpoptOptimizer())
    #
    #     @test SciMLBase.successful_retcode(sol)
    #     @test sol.u[1] ≈ 0.0 atol=1e-4
    # end

    @testset "Simple 2D Example (MyNLP)" begin
        # Based on MyNLP example from Ipopt
        # minimize -x[1] - x[2]
        # s.t.  x[2] - x[1]^2 = 0
        #       -1 <= x[1] <= 1
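        # On the constraint x[2] = x[1]^2 the objective reduces to -x[1] - x[1]^2,
        # which is minimized at x[1] = 1 on [-1, 1], so the optimum is x* = (1, 1)
        # with objective value -2 (checked below).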

        function simple_objective(x, p)
            return -x[1] - x[2]
        end

        function simple_constraint(res, x, p)
            res[1] = x[2] - x[1]^2
        end

        optfunc = OptimizationFunction(simple_objective, Optimization.AutoZygote();
            cons = simple_constraint)
        prob = OptimizationProblem(optfunc, [0.0, 0.0], nothing;
            lb = [-1.0, -Inf],
            ub = [1.0, Inf],
            lcons = [0.0],
            ucons = [0.0])
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.u[1] ≈ 1.0 atol=1e-6
        @test sol.u[2] ≈ 1.0 atol=1e-6
        @test sol.objective ≈ -2.0 atol=1e-6
    end

    @testset "Luksan-Vlcek Problem 1" begin
        # Based on LuksanVlcek1 from Ipopt examples
        # Variable dimension problem
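        # Objective: sum_{i=1}^{n-1} [100*(x[i]^2 - x[i+1])^2 + (x[i] - 1)^2]
        # (a chained Rosenbrock function), subject to n-2 range constraints
        # 4 <= 3*x[i+1]^3 + 2*x[i+2] - 5 + sin(x[i+1]-x[i+2])*sin(x[i+1]+x[i+2])
        #      + 4*x[i+1] - x[i]*exp(x[i]-x[i+1]) - 3 <= 6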

        function lv1_objective(x, p)
            n = length(x)
            obj = 0.0
            for i in 1:(n-1)
                obj += 100 * (x[i]^2 - x[i+1])^2 + (x[i] - 1)^2
            end
            return obj
        end

        function lv1_constraints(res, x, p)
            n = length(x)
            for i in 1:(n-2)
                res[i] = 3 * x[i+1]^3 + 2 * x[i+2] - 5 +
                         sin(x[i+1] - x[i+2]) * sin(x[i+1] + x[i+2]) +
                         4 * x[i+1] - x[i] * exp(x[i] - x[i+1]) - 3
            end
        end

        # Test with n = 5
        n = 5
        x0 = fill(3.0, n)

        optfunc = OptimizationFunction(lv1_objective, Optimization.AutoZygote();
            cons = lv1_constraints)
        prob = OptimizationProblem(optfunc, x0, nothing;
            lcons = fill(4.0, n-2),
            ucons = fill(6.0, n-2))
        sol = solve(prob, IpoptOptimizer(); maxiters = 1000, abstol = 1e-6)

        @test SciMLBase.successful_retcode(sol)
        @test length(sol.u) == n
        # Check constraints are satisfied
        res = zeros(n-2)
        lv1_constraints(res, sol.u, nothing)
        @test all(4.0 .<= res .<= 6.0)
    end

    @testset "Bound Constrained Quadratic" begin
        # Simple bound-constrained quadratic problem
        # minimize (x-2)^2 + (y-3)^2
        # s.t. 0 <= x <= 1, 0 <= y <= 2
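        # The unconstrained minimizer (2, 3) lies outside the box, so the
        # solution is its projection onto the bounds: (x, y) = (1, 2).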

        quadratic(x, p) = (x[1] - 2)^2 + (x[2] - 3)^2

        optfunc = OptimizationFunction(quadratic, Optimization.AutoZygote())
        prob = OptimizationProblem(optfunc, [0.5, 1.0], nothing;
            lb = [0.0, 0.0],
            ub = [1.0, 2.0])
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.u[1] ≈ 1.0 atol=1e-6
        @test sol.u[2] ≈ 2.0 atol=1e-6
    end

    @testset "Nonlinear Least Squares" begin
        # Formulate a nonlinear least squares problem
        # minimize sum((y[i] - f(x, t[i]))^2)
        # where f(x, t) = x[1] * exp(x[2] * t)
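        # The data below roughly follows y ≈ exp(0.8 * t); for example,
        # x = (1.0, 0.8) already gives a sum of squares of roughly 0.05,
        # so the fitted objective should comfortably beat the 0.1 threshold.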

        t = [0.0, 0.5, 1.0, 1.5, 2.0]
        y_data = [1.0, 1.5, 2.1, 3.2, 4.8]  # Some noisy exponential data

        function nls_objective(x, p)
            sum_sq = 0.0
            for i in eachindex(t)
                pred = x[1] * exp(x[2] * t[i])
                sum_sq += (y_data[i] - pred)^2
            end
            return sum_sq
        end

        optfunc = OptimizationFunction(nls_objective, Optimization.AutoZygote())
        prob = OptimizationProblem(optfunc, [1.0, 0.5], nothing)
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.objective < 0.1  # Should fit reasonably well
    end

    @testset "Mixed Integer-like Problem (Relaxed)" begin
        # Solve a problem that would normally carry integer constraints,
        # relaxed here to continuous variables
        # minimize x^2 + y^2
        # s.t. x + y >= 3.5
        #      0 <= x, y <= 5
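        # By symmetry and convexity the linear constraint is active at the
        # optimum, so x = y = 1.75 with objective value 2 * 1.75^2 = 6.125.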

        objective(x, p) = x[1]^2 + x[2]^2

        function constraint(res, x, p)
            res[1] = x[1] + x[2]
        end

        optfunc = OptimizationFunction(objective, Optimization.AutoZygote();
            cons = constraint)
        prob = OptimizationProblem(optfunc, [2.0, 2.0], nothing;
            lb = [0.0, 0.0],
            ub = [5.0, 5.0],
            lcons = [3.5],
            ucons = [Inf])
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.u[1] + sol.u[2] ≈ 3.5 atol=1e-6
        @test sol.u[1] ≈ sol.u[2] atol=1e-6  # By symmetry
    end

    @testset "Barrier Method Test" begin
        # Test problem where the interior-point (barrier) method is particularly relevant
        # minimize -log(x) - log(y)
        # s.t. x + y <= 2
        #      x, y > 0
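        # The optimum has the linear constraint active: maximizing log(x) + log(y)
        # subject to x + y = 2 gives x = y = 1 (e.g. by AM-GM), with objective 0.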

        function barrier_objective(x, p)
            if x[1] <= 0 || x[2] <= 0
                return Inf
            end
            return -log(x[1]) - log(x[2])
        end

        function barrier_constraint(res, x, p)
            res[1] = x[1] + x[2]
        end

        optfunc = OptimizationFunction(barrier_objective, Optimization.AutoZygote();
            cons = barrier_constraint)
        prob = OptimizationProblem(optfunc, [0.5, 0.5], nothing;
            lb = [1e-6, 1e-6],
            ub = [Inf, Inf],
            lcons = [-Inf],
            ucons = [2.0])
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.u[1] + sol.u[2] ≈ 2.0 atol=1e-4
        @test sol.u[1] ≈ 1.0 atol=1e-4
        @test sol.u[2] ≈ 1.0 atol=1e-4
    end

    @testset "Large Scale Sparse Problem" begin
        # Create a sparse optimization problem
        # minimize sum(x[i]^2) + sum((x[i] - x[i+1])^2)
        # s.t. x[1] + x[n] >= 1
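        # The objective only couples neighboring variables, so the Hessian is
        # tridiagonal (sparse). The problem is strictly convex and symmetric
        # under reversing the indices, and the constraint is active at the
        # optimum, so x[1] = x[n] = 0.5 at the solution.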

        n = 20

        function sparse_objective(x, p)
            obj = sum(x[i]^2 for i in 1:n)
            obj += sum((x[i] - x[i+1])^2 for i in 1:(n-1))
            return obj
        end

        function sparse_constraint(res, x, p)
            res[1] = x[1] + x[n]
        end

        optfunc = OptimizationFunction(sparse_objective, Optimization.AutoZygote();
            cons = sparse_constraint)
        x0 = fill(0.1, n)
        prob = OptimizationProblem(optfunc, x0, nothing;
            lcons = [1.0],
            ucons = [Inf])
        sol = solve(prob, IpoptOptimizer())

        @test SciMLBase.successful_retcode(sol)
        @test sol.u[1] + sol.u[n] >= 1.0 - 1e-6
    end
end

@testset "Different Hessian Approximations" begin
    # Test various Hessian approximation methods
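    # Setting the Ipopt option hessian_approximation = "limited-memory" makes
    # Ipopt use a limited-memory quasi-Newton approximation instead of the
    # exact Hessian; limited_memory_update_type then selects the update formula
    # ("bfgs" is Ipopt's default, "sr1" the symmetric rank-one alternative).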

    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2

    x0 = [0.0, 0.0]
    p = [1.0, 100.0]

    @testset "BFGS approximation" begin
        optfunc = OptimizationFunction(rosenbrock, Optimization.AutoZygote())
        prob = OptimizationProblem(optfunc, x0, p)
        sol = solve(prob, IpoptOptimizer();
            hessian_approximation = "limited-memory")

        @test SciMLBase.successful_retcode(sol)
        @test sol.u ≈ [1.0, 1.0] atol=1e-4
    end

    @testset "SR1 approximation" begin
        optfunc = OptimizationFunction(rosenbrock, Optimization.AutoZygote())
        prob = OptimizationProblem(optfunc, x0, p)
        sol = solve(prob, IpoptOptimizer();
            hessian_approximation = "limited-memory",
            limited_memory_update_type = "sr1")

        @test SciMLBase.successful_retcode(sol)
        @test sol.u ≈ [1.0, 1.0] atol=1e-4
    end
end

@testset "Warm Start Tests" begin
    # Test warm starting capabilities

    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2

    x0 = [0.5, 0.5]
    p = [1.0, 100.0]

    optfunc = OptimizationFunction(rosenbrock, Optimization.AutoZygote())
    prob = OptimizationProblem(optfunc, x0, p)

    # First solve
    cache = init(prob, IpoptOptimizer())
    sol1 = solve!(cache)

    @test SciMLBase.successful_retcode(sol1)
    @test sol1.u ≈ [1.0, 1.0] atol=1e-4

    # Note: Full warm-start testing would require modifying the problem and
    # re-solving, which needs reinit!/remake functionality
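    # A possible flow, once supported (hypothetical sketch; assumes reinit!
    # accepts a new u0 for this cache type):
    #     reinit!(cache; u0 = sol1.u)   # restart from the previous solution
    #     sol2 = solve!(cache)          # should converge in very few iterations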
end