# # Exterior of ellipsoid

#md # [](@__BINDER_ROOT_URL__/generated/Polynomial Optimization/ellipsoid.ipynb)
#md # [](@__NBVIEWER_ROOT_URL__/generated/Polynomial Optimization/ellipsoid.ipynb)
# **Adapted from**: Section 3.5 of [F99] and Table 5.1 of [Las09]
#
# [F99] Floudas, Christodoulos A. et al.
# *Handbook of Test Problems in Local and Global Optimization.*
# Nonconvex Optimization and Its Applications (NOIA, volume 33).
#
# [Las09] Lasserre, J. B.
# *Moments, Positive Polynomials and Their Applications.*
# World Scientific, **2009**.

# ## Introduction

# Consider the polynomial optimization problem [F99, Section 3.5] of minimizing a
# linear polynomial over a set defined by linear inequalities and the exterior of
# an ellipsoid, built from the following data:

A = [
     0  0  1
     0 -1  0
    -2  1 -1
]
bz = [3, 0, -4] - [0, -1, -6]
y = [1.5, -0.5, -5]

using Test #src
using DynamicPolynomials
@polyvar x[1:3]
p = -2x[1] + x[2] - x[3]
using SumOfSquares
e = A * x - y
f = e'e - bz'bz / 4
K = @set sum(x) <= 4 && 3x[2] + x[3] <= 6 && f >= 0 && 0 <= x[1] && x[1] <= 2 && 0 <= x[2] && 0 <= x[3] && x[3] <= 3
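
# As a quick sanity check (the point below is supplied here for illustration and is
# not taken from the original reference), the point $(0.5, 0, 3)$ satisfies all the
# linear constraints, lies on the boundary of the ellipsoid (`f` vanishes there) and
# has objective value `-4`, the value recovered as a lower bound at the last level
# of the hierarchy below.

x_feas = [0.5, 0.0, 3.0]
@assert sum(x_feas) <= 4 && 3x_feas[2] + x_feas[3] <= 6
@assert 0 <= x_feas[1] <= 2 && 0 <= x_feas[2] && 0 <= x_feas[3] <= 3
f(x => x_feas), p(x => x_feas) # evaluates to (0.0, -4.0)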

# We will now see how to find the optimal solution using Sum of Squares Programming.
# We first need to pick an SDP solver; see [here](https://jump.dev/JuMP.jl/v1.12/installation/#Supported-solvers) for a list of the available choices.

import Clarabel
solver = Clarabel.Optimizer

# A Sum-of-Squares certificate that $p \ge \alpha$ over the domain `K` ensures that $\alpha$ is a lower bound to the polynomial optimization problem.
# The following function searches for the largest lower bound certified by the `d`th level of the hierarchy.

function solve(d)
    model = SOSModel(solver)
    @variable(model, α)
    @objective(model, Max, α)
    ## Certify that `p - α` is nonnegative on `K` with a degree-`d` Sum-of-Squares certificate
    @constraint(model, c, p >= α, domain = K, maxdegree = d)
    optimize!(model)
    println(solution_summary(model))
    return model
end

# The first level of the hierarchy gives a lower bound of `-6`.
| 56 | + |
| 57 | +model2 = solve(2) |
| 58 | +nothing # hide |
| 59 | +@test objective_value(model2) ≈ -6 rtol=1e-4 #src |
| 60 | +@test termination_status(model2) == MOI.OPTIMAL #src |

# The second level improves the lower bound to `-74/13 ≈ -5.69`.
| 63 | + |
| 64 | +model4 = solve(4) |
| 65 | +nothing # hide |
| 66 | +@test objective_value(model4) ≈ -74/13 rtol=1e-4 #src |
| 67 | +@test termination_status(model4) == MOI.OPTIMAL #src |

# The third level improves it further to roughly `-4.07`.
| 70 | + |
| 71 | +model6 = solve(6) |
| 72 | +nothing # hide |
| 73 | +@test objective_value(model6) ≈ -4.06848 rtol=1e-4 #src |
| 74 | +@test termination_status(model6) == MOI.OPTIMAL #src |

# The fourth level finds the optimal objective value `-4` as a lower bound.
| 77 | + |
| 78 | +model8 = solve(8) |
| 79 | +nothing # hide |
| 80 | +@test objective_value(model8) ≈ -4 rtol=1e-4 #src |
| 81 | +@test termination_status(model8) == MOI.OPTIMAL #src |
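
# As a possible follow-up (not part of the original example), one could inspect the
# moments of the dual solution of the last level and attempt to extract an atomic
# measure supported on minimizers, as done in other SumOfSquares tutorials. This is
# only a sketch: it assumes that `moment_matrix` and `atomic_measure` (re-exported
# from MultivariateMoments by SumOfSquares) apply to the constraint `c` of `model8`,
# and the extraction may return `nothing` if the moment matrix is not numerically
# atomic.

ν = moment_matrix(model8[:c]) # moments of the dual solution of the level-8 relaxation
atomic_measure(ν, 1e-3)       # `nothing` if no atomic measure is found at this tolerance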