Skip to content

Commit 9a9174e

Browse files
Merge pull request #472 from AlCap23/master
Add integer support for MOI
2 parents 44a233c + c6a8e80 commit 9a9174e

File tree

6 files changed

+90
-20
lines changed

6 files changed

+90
-20
lines changed

docs/src/index.md

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -41,21 +41,21 @@ packages.
4141

4242
## Overview of the Optimizers
4343

44-
| Package | Local Gradient-Based | Local Hessian-Based | Local Derivative-Free | Box Constraints | Local Constrained | Global Unconstrained | Global Constrained |
45-
|:----------------------- |:--------------------:|:-------------------:|:---------------------:|:---------------:|:-----------------:|:--------------------:|:------------------:|
46-
| BlackBoxOptim | ❌ | ❌ | ❌ |✅ | ❌ | ✅ | ❌ |✅
47-
| CMAEvolutionaryStrategy |||| ||||
48-
| Evolutionary |||| ||| 🟡 |
49-
| Flux |||| ||||
50-
| GCMAES |||| ||||
51-
| MathOptInterface |||| ||| 🟡 |
52-
| MultistartOptimization |||| ||||
53-
| Metaheuristics |||| ||| 🟡 |
54-
| NOMAD |||| ||| 🟡 |
55-
| NLopt |||| | 🟡 || 🟡 |
56-
| Nonconvex |||| | 🟡 || 🟡 |
57-
| Optim |||| ||||
58-
| QuadDIRECT |||| ||||
44+
| Package | Local Gradient-Based | Local Hessian-Based | Local Derivative-Free | Box Constraints | Local Constrained | Global Unconstrained | Global Constrained |
45+
|:----------------------- |:--------------------:|:-------------------:|:---------------------:|:---------------:|:-----------------:|:--------------------:|:--------------------:|
46+
| BlackBoxOptim ||| | ||||
47+
| CMAEvolutionaryStrategy ||| | ||| |
48+
| Evolutionary ||| | ||| 🟡 |
49+
| Flux ||| | ||| |
50+
| GCMAES ||| | ||| |
51+
| MathOptInterface ||| | ||| 🟡 |
52+
| MultistartOptimization ||| | ||| |
53+
| Metaheuristics ||| | ||| 🟡 |
54+
| NOMAD ||| | ||| 🟡 |
55+
| NLopt ||| | | 🟡 || 🟡 |
56+
| Nonconvex ||| | | 🟡 || 🟡 |
57+
| Optim ||| | ||| |
58+
| QuadDIRECT ||| | ||| |
5959

6060
✅ = supported
6161

docs/src/optimization_packages/mathoptinterface.md

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,3 +76,35 @@ opt = OptimizationMOI.MOI.OptimizerWithAttributes(Juniper.Optimizer,
7676
"print_level" => 0))
7777
sol = solve(prob, opt)
7878
```
79+
80+
#### Using Integer Constraints
81+
82+
The following shows how to use integer linear programming within `Optimization`. We will solve the classical Knapsack Problem using `Juniper.jl`.
83+
84+
- [`Juniper.Optimizer`](https://github.com/lanl-ansi/Juniper.jl)
85+
- Juniper requires a nonlinear optimizer to be set via the `nl_solver` option,
86+
which must be a MathOptInterface-based optimizer. See the
87+
[Juniper documentation](https://github.com/lanl-ansi/Juniper.jl) for more
88+
detail.
89+
- Only binary (0/1) decision variables are supported
90+
91+
```@example MOI
92+
v = [1.0, 2.0, 4.0, 3.0]
93+
w = [5.0, 4.0, 3.0, 2.0]
94+
W = 4.0
95+
u0 = [0.0, 0.0, 0.0, 1.0]
96+
97+
optfun = OptimizationFunction((u, p) -> -v'u, cons = (res, u, p) -> res .= w'u,
98+
Optimization.AutoForwardDiff())
99+
100+
optprob = OptimizationProblem(optfun, u0; lb = zero.(u0), ub = one.(u0),
101+
int = ones(Bool, length(u0)),
102+
lcons = [-Inf;], ucons = [W;])
103+
104+
nl_solver = OptimizationMOI.MOI.OptimizerWithAttributes(Ipopt.Optimizer,
105+
"print_level" => 0)
106+
minlp_solver = OptimizationMOI.MOI.OptimizerWithAttributes(Juniper.Optimizer,
107+
"nl_solver" => nl_solver)
108+
109+
res = solve(optprob, minlp_solver)
110+
```

docs/src/tutorials/intro.md

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,10 +45,13 @@ sol.original
4545
```
4646

4747
## Defining the objective function
48+
4849
Optimization.jl assumes that your objective function takes two arguments `objective(x, p)`
49-
1. The optimization variables `x`.
50-
2. Other parameters `p`, such as hyper parameters of the cost function.
51-
If you have no “other parameters”, you can safely disregard this argument. If your objective function is defined by someone else, you can create an anonymous function that just discards the extra parameters like this
50+
51+
1. The optimization variables `x`.
52+
2. Other parameters `p`, such as hyper parameters of the cost function.
53+
If you have no “other parameters”, you can safely disregard this argument. If your objective function is defined by someone else, you can create an anonymous function that just discards the extra parameters like this
54+
5255
```julia
5356
obj = (x, p) -> objective(x) # Pass this function into OptimizationFunction
5457
```

lib/OptimizationMOI/Project.toml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,13 @@ SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
1111

1212
[compat]
1313
MathOptInterface = "1"
14+
Juniper = "0.9"
1415
Optimization = "3.9"
1516
Reexport = "1.2"
1617
julia = "1"
1718

1819
[extras]
20+
Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
1921
AmplNLWriter = "7c4d4715-977e-5154-bfe0-e096adeac482"
2022
Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
2123
Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
@@ -25,4 +27,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
2527
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
2628

2729
[targets]
28-
test = ["Ipopt", "AmplNLWriter", "Ipopt_jll", "ModelingToolkit", "NLopt", "Test", "Zygote"]
30+
test = ["Ipopt", "AmplNLWriter", "Ipopt_jll", "ModelingToolkit", "NLopt", "Juniper", "Test", "Zygote"]

lib/OptimizationMOI/src/OptimizationMOI.jl

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -358,6 +358,16 @@ function SciMLBase.__solve(prob::OptimizationProblem,
358358
end
359359
end
360360
end
361+
362+
if prob.int !== nothing
363+
@assert eachindex(prob.int) == Base.OneTo(num_variables)
364+
for i in 1:num_variables
365+
if prob.int[i]
366+
MOI.add_constraint(opt_setup, θ[i], MOI.ZeroOne())
367+
end
368+
end
369+
end
370+
361371
if MOI.supports(opt_setup, MOI.VariablePrimalStart(), MOI.VariableIndex)
362372
@assert eachindex(prob.u0) == Base.OneTo(num_variables)
363373
for i in 1:num_variables

lib/OptimizationMOI/test/runtests.jl

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
using OptimizationMOI, Optimization, Ipopt, NLopt, Zygote, ModelingToolkit
2-
using AmplNLWriter, Ipopt_jll
2+
using AmplNLWriter, Ipopt_jll, Juniper
33
using Test
44

55
function _test_sparse_derivatives_hs071(backend, optimizer)
@@ -94,3 +94,26 @@ end
9494
_test_sparse_derivatives_hs071(backend, AmplNLWriter.Optimizer(Ipopt_jll.amplexe))
9595
end
9696
end end
97+
98+
@testset "MINLP" begin
99+
v = [1.0, 2.0, 4.0, 3.0]
100+
w = [5.0, 4.0, 3.0, 2.0]
101+
W = 4.0
102+
u0 = [0.0, 0.0, 0.0, 1.0]
103+
104+
optfun = OptimizationFunction((u, p) -> -v'u, cons = (res, u, p) -> res .= w'u,
105+
Optimization.AutoForwardDiff())
106+
107+
optprob = OptimizationProblem(optfun, u0; lb = zero.(u0), ub = one.(u0),
108+
int = ones(Bool, length(u0)),
109+
lcons = [-Inf;], ucons = [W;])
110+
111+
nl_solver = OptimizationMOI.MOI.OptimizerWithAttributes(Ipopt.Optimizer,
112+
"print_level" => 0)
113+
minlp_solver = OptimizationMOI.MOI.OptimizerWithAttributes(Juniper.Optimizer,
114+
"nl_solver" => nl_solver)
115+
116+
res = solve(optprob, minlp_solver)
117+
@test res.u == [0.0, 0.0, 1.0, 0.0]
118+
@test res.objective == -4.0
119+
end

0 commit comments

Comments
 (0)