|
1 | | -# # Extracting minimizers |
| 1 | +# # Maximizing as minimum |
2 | 2 |
|
3 | | -#md # [](@__BINDER_ROOT_URL__/generated/Polynomial Optimization/extracting_minimizers.ipynb) |
4 | | -#md # [](@__NBVIEWER_ROOT_URL__/generated/Polynomial Optimization/extracting_minimizers.ipynb) |
5 | | -# **Adapted from**: Example 6.23 of [L09] |
| 3 | +#md # [](@__BINDER_ROOT_URL__/generated/Polynomial Optimization/min_univariate.ipynb) |
| 4 | +#md # [](@__NBVIEWER_ROOT_URL__/generated/Polynomial Optimization/min_univariate.ipynb) |
| 5 | +# **Adapted from**: Section 4.10 of [F99], Example 6.23 of [L09] and Table 5.1 of [Las09] |
| 6 | +# |
| 7 | +# [F99] Floudas, Christodoulos A. et al. |
| 8 | +# *Handbook of Test Problems in Local and Global Optimization.* |
| 9 | +# Nonconvex Optimization and Its Applications (NOIA, volume 33). |
6 | 10 | # |
7 | 11 | # [L09] Laurent, Monique. |
8 | 12 | # *Sums of squares, moment matrices and optimization over polynomials.* |
9 | 13 | # Emerging applications of algebraic geometry (2009): 157-270. |
| 14 | +# |
| 15 | +# [Las09] Lasserre, J. B. |
| 16 | +# *Moments, positive polynomials and their applications.*
| 17 | +# World Scientific, **2009**. |
10 | 18 |
|
11 | 19 | # ## Introduction |
12 | 20 |
|
13 | | -# Consider the polynomial optimization problem [L09, Example 6.23] of |
14 | | -# minimizing the linear function $-x_1 - x_2$ |
| 21 | +# Consider the polynomial optimization problem [F99, Section 4.10] |
| 22 | +# of minimizing the linear function $-x_1 - x_2$ |
15 | 23 | # over the basic semialgebraic set defined by the inequalities |
16 | 24 | # $x_2 \le 2x_1^4 - 8x_1^3 + 8x_1^2 + 2$, |
17 | 25 | # $x_2 \le 4x_1^4 - 32x_1^3 + 88x_1^2 - 96x_1 + 36$ and the box constraints |
18 | 26 | # $0 \le x_1 \le 3$ and $0 \le x_2 \le 4$.
19 | | -# World Scientific, **2009**. |
20 | 27 |
|
21 | 28 | using Test #src |
22 | 29 | using DynamicPolynomials |
23 | 30 | @polyvar x[1:2] |
24 | 31 | p = -sum(x) |
25 | 32 | using SumOfSquares |
26 | | -K = @set x[1] >= 0 && x[1] <= 3 && x[2] >= 0 && x[2] <= 4 && x[2] <= 2x[1]^4 - 8x[1]^3 + 8x[1]^2 + 2 && x[2] <= 4x[1]^4 - 32x[1]^3 + 88x[1]^2 - 96x[1] + 36 |
| 33 | +f1 = 2x[1]^4 - 8x[1]^3 + 8x[1]^2 + 2 |
| 34 | +f2 = 4x[1]^4 - 32x[1]^3 + 88x[1]^2 - 96x[1] + 36 |
| 35 | +K = @set x[1] >= 0 && x[1] <= 3 && x[2] >= 0 && x[2] <= 4 && x[2] <= f1 && x[2] <= f2 |
| 36 | + |
| 37 | +# As we can observe below, the bounds on `x[2]` could be dropped: the
| 38 | +# optimization problem is equivalent to the maximization of `x[1] + min(f1(x[1]), f2(x[1]))`
| 39 | +# for `x[1]` between `0` and `3`.
| 40 | + |
| 41 | +xs = range(0, stop = 3, length = 100) |
| 42 | +using Plots |
| 43 | +plot(xs, f1.(xs), label = "f1") |
| 44 | +plot!(xs, f2.(xs), label = "f2") |
| 45 | +plot!(xs, 4 * ones(length(xs)), label = nothing) |
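
# As a quick sanity check (added here, not part of the original script), we can
# approximate the maximum of `x[1] + min(f1(x[1]), f2(x[1]))` over `[0, 3]` by a
# fine grid search; its value should be close to the negative of the optimal
# value recovered with Sum-of-Squares below.

grid = range(0, stop = 3, length = 10_000)
vals = [x1 + min(f1(x1), f2(x1)) for x1 in grid]
best, idx = findmax(vals)
grid[idx], best # roughly (2.33, 5.51)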
27 | 46 |
|
28 | 47 | # We will now see how to find the optimal solution using Sum of Squares Programming. |
29 | 48 | # We first need to pick an SDP solver, see [here](https://jump.dev/JuMP.jl/v1.12/installation/#Supported-solvers) for a list of the available choices. |
30 | 49 |
|
31 | | -import CSDP |
32 | | -solver = optimizer_with_attributes(CSDP.Optimizer, MOI.Silent() => true) |
| 50 | +import Clarabel |
| 51 | +solver = Clarabel.Optimizer |
33 | 52 |
|
34 | 53 | # A Sum-of-Squares certificate that $p \ge \alpha$ over the domain `K` ensures that $\alpha$ is a lower bound to the polynomial optimization problem.
35 | 54 | # The following function searches for the largest such lower bound using the `d`th level of the hierarchy.
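
# The `solve` function referenced here is unchanged by this commit, so the diff
# below skips over it. As a rough sketch only (assuming the usual SumOfSquares.jl
# modeling API rather than reproducing the tutorial's exact code), such a
# function could look like this:

function solve_sketch(d)
    model = SOSModel(solver)
    @variable(model, α)
    @objective(model, Max, α)
    ## certify that p - α is nonnegative on K with a certificate of degree at most d
    @constraint(model, p >= α, domain = K, maxdegree = d)
    optimize!(model)
    return model
end

# Mirroring the `model7 = solve(7)` call below, `solve_sketch(7)` would return a
# JuMP model whose objective value is the level-7 lower bound.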
@@ -74,3 +93,7 @@ model7 = solve(7) |
74 | 93 | x_opt = η.atoms[1].center |
75 | 94 | @test x_opt ≈ [2.3295, 3.1785] rtol=1e-4 #src |
76 | 95 | p(x_opt) |
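
# Since an atomic measure `η` could be extracted from the moment matrix, the
# level-7 relaxation is expected to be exact, so the bound returned by the solver
# should match the value of `p` at the recovered point (a check added here, not
# present in the original script):

objective_value(model7) # expected ≈ p(x_opt) ≈ -5.5080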
| 96 | + |
| 97 | +# We can visualize the solution as follows:
| 98 | + |
| 99 | +scatter!([x_opt[1]], [x_opt[2]], markershape = :star, label = nothing) |