# # Bilinear terms

#md # [![](https://mybinder.org/badge_logo.svg)](@__BINDER_ROOT_URL__/generated/Polynomial Optimization/bilinear.ipynb)
#md # [![](https://img.shields.io/badge/show-nbviewer-579ACA.svg)](@__NBVIEWER_ROOT_URL__/generated/Polynomial Optimization/bilinear.ipynb)
# **Adapted from**: Section 3.1 of [F99] and Table 5.1 of [Las09]
#
# [F99] Floudas, Christodoulos A. et al.
# *Handbook of Test Problems in Local and Global Optimization.*
# Nonconvex Optimization and Its Applications (NOIA, volume 33).
#
# [Las09] Lasserre, J. B.
# *Moments, positive polynomials and their applications*
# World Scientific, **2009**.
# ## Introduction

# Consider the polynomial optimization problem [F99, Section 3.1]

| 19 | +using Test #src |
| 20 | +using DynamicPolynomials |
| 21 | +@polyvar x[1:8] |
| 22 | +p = sum(x[1:3]) |
| 23 | +using SumOfSquares |
| 24 | +K = @set 0.0025 * (x[4] + x[6]) <= 1 && |
| 25 | + 0.0025 * (-x[4] + x[5] + x[7]) <= 1 && |
| 26 | + 0.01 * (-x[5] + x[8]) <= 1 && |
| 27 | + 100x[1] - x[1] * x[6] + 8333.33252x[4] <= 250000/3 && |
| 28 | + x[2] * x[4] - x[2] * x[7] - 1250x[4] + 1250x[5] <= 0 && |
| 29 | + x[3] * x[5] - x[3] * x[8] - 2500x[5] + 1250000 <= 0 && |
| 30 | + 100 <= x[1] && x[1] <= 10000 && |
| 31 | + 1000 <= x[2] && x[2] <= 10000 && |
| 32 | + 1000 <= x[3] && x[3] <= 10000 && |
| 33 | + 10 <= x[4] && x[4] <= 1000 && |
| 34 | + 10 <= x[5] && x[5] <= 1000 && |
| 35 | + 10 <= x[6] && x[6] <= 1000 && |
| 36 | + 10 <= x[7] && x[7] <= 1000 && |
| 37 | + 10 <= x[8] && x[8] <= 1000 |
| 38 | + |
# We will now see how to find the optimal solution using Sum of Squares Programming.
# We first need to pick an SDP solver, see [here](https://jump.dev/JuMP.jl/v1.12/installation/#Supported-solvers) for a list of the available choices.

| 42 | +import Clarabel |
| 43 | +solver = Clarabel.Optimizer |
| 44 | + |
# A Sum-of-Squares certificate that $p \ge \alpha$ over the domain `K` ensures that $\alpha$ is a lower bound to the polynomial optimization problem.
# The following function searches for the largest lower bound using the `d`th level of the hierarchy.

| 48 | +function solve(d) |
| 49 | + model = SOSModel(solver) |
| 50 | + @variable(model, α) |
| 51 | + @objective(model, Max, α) |
| 52 | + @constraint(model, c, p >= α, domain = K, maxdegree = d) |
| 53 | + optimize!(model) |
| 54 | + println(solution_summary(model)) |
| 55 | + return model |
| 56 | +end |
| 57 | + |
# The first level of the hierarchy gives a lower bound of `2100`.

| 60 | +model2 = solve(2) |
| 61 | +@test objective_value(model2) ≈ 2100 rtol=1e-4 #src |
| 62 | +@test termination_status(model2) == MOI.OPTIMAL #src |