Commit ea677e1

Merge pull request #218 from odow/od/moi1
Update to MathOptInterface v1.0
2 parents: fa5a8bd + 72155b6
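Among the breaking changes this commit adapts to, MathOptInterface v1.0 removed `MOI.SingleVariable`: variable bounds are now imposed by constraining the `MOI.VariableIndex` itself, and raw solver options are set through `MOI.RawOptimizerAttribute`. A minimal sketch of the new bound API, using a generic in-memory `MOI.Utilities.Model` purely for illustration:

using MathOptInterface
const MOI = MathOptInterface

model = MOI.Utilities.Model{Float64}()   # stand-in for any MOI optimizer
x = MOI.add_variable(model)              # returns a MOI.VariableIndex
# MOI < 1.0 wrapped the index:
#     MOI.add_constraint(model, MOI.SingleVariable(x), MOI.GreaterThan(0.0))
# MOI >= 1.0 constrains the index directly, as the updated __solve below does:
MOI.add_constraint(model, x, MOI.GreaterThan(0.0))
MOI.add_constraint(model, x, MOI.LessThan(1.0))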

File tree: 2 files changed (+143, -90 lines)


src/solve/moi.jl

Lines changed: 142 additions & 89 deletions
@@ -4,22 +4,95 @@ const MOI = MathOptInterface
 struct MOIOptimizationProblem{T,F<:OptimizationFunction,uType,P} <: MOI.AbstractNLPEvaluator
     f::F
     u0::uType
-    p::P
+    p::P
     J::Matrix{T}
     H::Matrix{T}
     cons_H::Vector{Matrix{T}}
 end
 
-MOI.eval_objective(moiproblem::MOIOptimizationProblem, x) = moiproblem.f(x, moiproblem.p)
+function MOIOptimizationProblem(prob::OptimizationProblem)
+    num_cons = prob.ucons === nothing ? 0 : length(prob.ucons)
+    f = instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p, num_cons)
+    T = eltype(prob.u0)
+    n = length(prob.u0)
+    return MOIOptimizationProblem(
+        f,
+        prob.u0,
+        prob.p,
+        zeros(T, num_cons, n),
+        zeros(T, n, n),
+        Matrix{T}[zeros(T, n, n) for i in 1:num_cons],
+    )
+end
+
+MOI.features_available(::MOIOptimizationProblem) = [:Grad, :Hess, :Jac]
+
+function MOI.initialize(
+    moiproblem::MOIOptimizationProblem,
+    requested_features::Vector{Symbol},
+)
+    available_features = MOI.features_available(moiproblem)
+    for feat in requested_features
+        if !(feat in available_features)
+            error("Unsupported feature $feat")
+            # TODO: implement Jac-vec and Hess-vec products
+            # for solvers that need them
+        end
+    end
+    return
+end
+
+function MOI.eval_objective(moiproblem::MOIOptimizationProblem, x)
+    return moiproblem.f(x, moiproblem.p)
+end
 
-MOI.eval_constraint(moiproblem::MOIOptimizationProblem, g, x) = g .= moiproblem.f.cons(x)
+function MOI.eval_constraint(moiproblem::MOIOptimizationProblem, g, x)
+    g .= moiproblem.f.cons(x)
+    return
+end
 
-MOI.eval_objective_gradient(moiproblem::MOIOptimizationProblem, G, x) = moiproblem.f.grad(G, x)
+function MOI.eval_objective_gradient(moiproblem::MOIOptimizationProblem, G, x)
+    moiproblem.f.grad(G, x)
+    return
+end
 
-function MOI.eval_hessian_lagrangian(moiproblem::MOIOptimizationProblem{T}, h, x, σ, μ) where {T}
+# This structure assumes the calculation of moiproblem.J is dense.
+function MOI.jacobian_structure(moiproblem::MOIOptimizationProblem)
+    rows, cols = size(moiproblem.J)
+    return Tuple{Int,Int}[(i, j) for j in 1:cols for i in 1:rows]
+end
+
+function MOI.eval_constraint_jacobian(moiproblem::MOIOptimizationProblem, j, x)
+    if isempty(j)
+        return
+    elseif moiproblem.f.cons_j === nothing
+        error(
+            "Use OptimizationFunction to pass the derivatives or " *
+            "automatically generate them with one of the autodiff backends",
+        )
+    end
+    moiproblem.f.cons_j(moiproblem.J, x)
+    for i in eachindex(j)
+        j[i] = moiproblem.J[i]
+    end
+    return
+end
+
+# Because the Hessian is symmetrical, we choose to store the upper-triangular
+# component. We also assume that it is dense.
+function MOI.hessian_lagrangian_structure(moiproblem::MOIOptimizationProblem)
+    num_vars = length(moiproblem.u0)
+    return Tuple{Int,Int}[(row, col) for col in 1:num_vars for row in 1:col]
+end
+
+function MOI.eval_hessian_lagrangian(
+    moiproblem::MOIOptimizationProblem{T},
+    h,
+    x,
+    σ,
+    μ,
+) where {T}
     n = length(moiproblem.u0)
-    a = zeros(n, n)
-    moiproblem.f.hess(a, x)
     if iszero(σ)
         fill!(h, zero(T))
     else
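As a quick sanity check on the column-major, upper-triangular ordering returned by the new `MOI.hessian_lagrangian_structure` above, here is what the same comprehension yields for an illustrative three-variable problem (`num_vars = 3` is hypothetical):

num_vars = 3
structure = Tuple{Int,Int}[(row, col) for col in 1:num_vars for row in 1:col]
# structure == [(1, 1), (1, 2), (2, 2), (1, 3), (2, 3), (3, 3)]
# num_vars * (num_vars + 1) ÷ 2 == 6 entries, one per upper-triangular element,
# matching the length of the vector `h` that MOI.eval_hessian_lagrangian fills.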
@@ -47,126 +120,99 @@ function MOI.eval_hessian_lagrangian(moiproblem::MOIOptimizationProblem{T}, h, x
     return
 end
 
-function MOI.eval_constraint_jacobian(moiproblem::MOIOptimizationProblem, j, x)
-    isempty(j) && return
-    moiproblem.f.cons_j === nothing && error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
-    n = length(moiproblem.u0)
-    moiproblem.f.cons_j(moiproblem.J, x)
-    for i in eachindex(j)
-        j[i] = moiproblem.J[i]
-    end
-end
-
-function MOI.jacobian_structure(moiproblem::MOIOptimizationProblem)
-    return Tuple{Int,Int}[(con, var) for con in 1:size(moiproblem.J,1) for var in 1:size(moiproblem.J,2)]
-end
-
-function MOI.hessian_lagrangian_structure(moiproblem::MOIOptimizationProblem)
-    return Tuple{Int,Int}[(row, col) for col in 1:length(moiproblem.u0) for row in 1:col]
-end
-
-function MOI.initialize(moiproblem::MOIOptimizationProblem, requested_features::Vector{Symbol})
-    for feat in requested_features
-        if !(feat in MOI.features_available(moiproblem))
-            error("Unsupported feature $feat")
-            # TODO: implement Jac-vec and Hess-vec products
-            # for solvers that need them
-        end
-    end
-end
-
-MOI.features_available(moiproblem::MOIOptimizationProblem) = [:Grad, :Hess, :Jac]
-
-function make_moi_problem(prob::OptimizationProblem)
-    num_cons = prob.ucons === nothing ? 0 : length(prob.ucons)
-    f = instantiate_function(prob.f,prob.u0,prob.f.adtype,prob.p,num_cons)
-    T = eltype(prob.u0)
-    n = length(prob.u0)
-    moiproblem = MOIOptimizationProblem(f,prob.u0,prob.p,zeros(T,num_cons,n),zeros(T,n,n),Matrix{T}[zeros(T,n,n) for i in 1:num_cons])
-    return moiproblem
-end
+_create_new_optimizer(opt::MOI.AbstractOptimizer) = opt
+_create_new_optimizer(opt::MOI.OptimizerWithAttributes) = MOI.instantiate(opt)
 
-function __map_optimizer_args(prob::OptimizationProblem, opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes};
+function __map_optimizer_args(
+    prob::OptimizationProblem,
+    opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes};
     maxiters::Union{Number, Nothing}=nothing,
     maxtime::Union{Number, Nothing}=nothing,
-    abstol::Union{Number, Nothing}=nothing,
+    abstol::Union{Number, Nothing}=nothing,
     reltol::Union{Number, Nothing}=nothing,
-    kwargs...)
-
-    mapped_args = Vector{Pair{String, Any}}[]
-    mapped_args = [mapped_args..., [Pair(string(j.first),j.second) for j = kwargs]...]
-
-    if isa(opt, MOI.AbstractOptimizer)
-        if length(mapped_args) > 0
-            opt = MOI.OptimizerWithAttributes(typeof(opt), mapped_args...)
-        else
-            opt = typeof(opt)
-        end
+    kwargs...,
+)
+    optimizer = _create_new_optimizer(opt)
+    for (key, value) in kwargs
+        MOI.set(optimizer, MOI.RawOptimizerAttribute("$(key)"), value)
     end
-
-    optimizer = MOI.instantiate(opt)
-
     if !isnothing(maxtime)
         MOI.set(optimizer, MOI.TimeLimitSec(), maxtime)
     end
-
     if !isnothing(reltol)
         @warn "common reltol argument is currently not used by $(optimizer). Set tolerances via optimizer specific keyword aguments."
     end
-
     if !isnothing(abstol)
         @warn "common abstol argument is currently not used by $(optimizer). Set tolerances via optimizer specific keyword aguments."
     end
-
     if !isnothing(maxiters)
         @warn "common maxiters argument is currently not used by $(optimizer). Set number of interations via optimizer specific keyword aguments."
     end
-
     return optimizer
 end
 
-function __solve(prob::OptimizationProblem, opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes};
+function __solve(
+    prob::OptimizationProblem,
+    opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes};
     maxiters::Union{Number, Nothing}=nothing,
     maxtime::Union{Number, Nothing}=nothing,
-    abstol::Union{Number, Nothing}=nothing,
+    abstol::Union{Number, Nothing}=nothing,
     reltol::Union{Number, Nothing}=nothing,
-    kwargs...)
-
+    kwargs...,
+)
     maxiters = _check_and_convert_maxiters(maxiters)
     maxtime = _check_and_convert_maxtime(maxtime)
-
-    opt_setup = __map_optimizer_args(prob, opt; abstol=abstol, reltol=reltol, maxiters=maxiters, maxtime=maxtime, kwargs...)
-
+    opt_setup = __map_optimizer_args(
+        prob,
+        opt;
+        abstol=abstol,
+        reltol=reltol,
+        maxiters=maxiters,
+        maxtime=maxtime,
+        kwargs...,
+    )
     num_variables = length(prob.u0)
-    θ = MOI.add_variables(opt_setup, num_variables)
-    if prob.lb !== nothing
+    θ = MOI.add_variables(opt_setup, num_variables)
+    if prob.lb !== nothing
        @assert eachindex(prob.lb) == Base.OneTo(num_variables)
-        for i in 1:num_variables
-            MOI.add_constraint(opt_setup, MOI.SingleVariable(θ[i]), MOI.GreaterThan(prob.lb[i]))
+        for i in 1:num_variables
+            if prob.lb[i] > -Inf
+                MOI.add_constraint(opt_setup, θ[i], MOI.GreaterThan(prob.lb[i]))
+            end
         end
     end
-    if prob.ub !== nothing
+    if prob.ub !== nothing
        @assert eachindex(prob.ub) == Base.OneTo(num_variables)
-        for i in 1:num_variables
-            MOI.add_constraint(opt_setup, MOI.SingleVariable(θ[i]), MOI.LessThan(prob.ub[i]))
+        for i in 1:num_variables
+            if prob.ub[i] < Inf
+                MOI.add_constraint(opt_setup, θ[i], MOI.LessThan(prob.ub[i]))
+            end
         end
     end
-    @assert eachindex(prob.u0) == Base.OneTo(num_variables)
-    for i in 1:num_variables
-        MOI.set(opt_setup, MOI.VariablePrimalStart(), θ[i], prob.u0[i])
-    end
-    MOI.set(opt_setup, MOI.ObjectiveSense(), prob.sense === MaxSense ? MOI.MAX_SENSE : MOI.MIN_SENSE)
+    if MOI.supports(opt_setup, MOI.VariablePrimalStart(), MOI.VariableIndex)
+        @assert eachindex(prob.u0) == Base.OneTo(num_variables)
+        for i in 1:num_variables
+            MOI.set(opt_setup, MOI.VariablePrimalStart(), θ[i], prob.u0[i])
+        end
+    end
+    MOI.set(
+        opt_setup,
+        MOI.ObjectiveSense(),
+        prob.sense === MaxSense ? MOI.MAX_SENSE : MOI.MIN_SENSE,
+    )
     if prob.lcons === nothing
         @assert prob.ucons === nothing
         con_bounds = MOI.NLPBoundsPair[]
     else
         @assert prob.ucons !== nothing
         con_bounds = MOI.NLPBoundsPair.(prob.lcons, prob.ucons)
     end
-    MOI.set(opt_setup, MOI.NLPBlock(), MOI.NLPBlockData(con_bounds, make_moi_problem(prob), true))
-
-    MOI.optimize!(opt_setup)
-
+    MOI.set(
+        opt_setup,
+        MOI.NLPBlock(),
+        MOI.NLPBlockData(con_bounds, MOIOptimizationProblem(prob), true),
+    )
+    MOI.optimize!(opt_setup)
     if MOI.get(opt_setup, MOI.ResultCount()) >= 1
         minimizer = MOI.get(opt_setup, MOI.VariablePrimal(), θ)
         minimum = MOI.get(opt_setup, MOI.ObjectiveValue())
@@ -176,5 +222,12 @@ function __solve(prob::OptimizationProblem, opt::Union{MOI.AbstractOptimizer, MO
         minimum = NaN
         opt_ret= :Default
     end
-    SciMLBase.build_solution(prob, opt, minimizer, minimum; original=opt_setup, retcode=opt_ret)
+    return SciMLBase.build_solution(
+        prob,
+        opt,
+        minimizer,
+        minimum;
+        original=opt_setup,
+        retcode=opt_ret,
+    )
 end
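The rewritten `__map_optimizer_args` no longer builds a `Vector{Pair}` of options: `_create_new_optimizer` either returns the optimizer unchanged or instantiates an `MOI.OptimizerWithAttributes`, and every remaining keyword is forwarded as a raw option. Because the common `maxiters`, `abstol`, and `reltol` keywords still only warn, solver-specific limits have to travel this way. A sketch of the two equivalent paths an option can take, assuming Ipopt is installed and using its `max_iter` option as an example:

using Ipopt, MathOptInterface
const MOI = MathOptInterface

# What the keyword loop in __map_optimizer_args does for a `max_iter = 500` kwarg:
optimizer = Ipopt.Optimizer()
MOI.set(optimizer, MOI.RawOptimizerAttribute("max_iter"), 500)

# The pre-bundled form handled by _create_new_optimizer via MOI.instantiate:
opt = MOI.OptimizerWithAttributes(Ipopt.Optimizer, "max_iter" => 500)
optimizer2 = MOI.instantiate(opt)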

test/Project.toml

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ ForwardDiff = ">= 0.10.19"
 GCMAES = ">= 0.1.25"
 Ipopt = ">= 0.7.0"
 IterTools = ">= 1.3.0"
-MathOptInterface = ">= 0.9.22"
+MathOptInterface = ">= 1"
 Metaheuristics = ">=3.0.2"
 ModelingToolkit = ">= 6.4.7"
 MultistartOptimization = ">= 0.1.2"
