Commit 1e39d52

Merge pull request #284 from odow/od/fix-moi-sparsity
[OptimizationMOI] fix handling of sparse derivatives and add tests
2 parents bd05059 + ad8575e commit 1e39d52

2 files changed (+102, -35 lines)

lib/OptimizationMOI/src/OptimizationMOI.jl

Lines changed: 61 additions & 35 deletions
@@ -80,29 +80,51 @@ function MOI.eval_constraint_jacobian(moiproblem::MOIOptimizationProblem, j, x)
         )
     end
     moiproblem.f.cons_j(moiproblem.J, x)
-    for i in eachindex(j)
-        j[i] = moiproblem.J[i]
+    if moiproblem.J isa SparseMatrixCSC
+        nnz = nonzeros(moiproblem.J)
+        @assert length(j) == length(nnz)
+        for (i, Ji) in zip(eachindex(j), nnz)
+            j[i] = Ji
+        end
+    else
+        for i in eachindex(j)
+            j[i] = moiproblem.J[i]
+        end
     end
     return
 end

-# Because the Hessian is symmetrical, we choose to store the upper-triangular
-# component. We also assume that it is dense.
 function MOI.hessian_lagrangian_structure(moiproblem::MOIOptimizationProblem)
-    if moiproblem.H isa SparseMatrixCSC
+    sparse_obj = moiproblem.H isa SparseMatrixCSC
+    sparse_constraints = all(H -> H isa SparseMatrixCSC, moiproblem.cons_H)
+    if !sparse_constraints && any(H -> H isa SparseMatrixCSC, moiproblem.cons_H)
+        # Some constraint hessians are dense and some are sparse! :(
+        error("Mix of sparse and dense constraint hessians are not supported")
+    end
+    N = length(moiproblem.u0)
+    inds = if sparse_obj
         rows, cols, _ = findnz(moiproblem.H)
-        inds = Tuple{Int,Int}[(i, j) for (i,j) in zip(rows, cols)]
-        for ind in 1:length(moiproblem.cons_H)
-            r,c,_ = findnz(moiproblem.cons_H[ind])
-            for (i,j) in zip(r,c)
-                push!(inds, (i,j))
+        Tuple{Int,Int}[(i, j) for (i, j) in zip(rows, cols) if i <= j]
+    else
+        Tuple{Int,Int}[(row, col) for col in 1:N for row in 1:col]
+    end
+    if sparse_constraints
+        for Hi in moiproblem.cons_H
+            r, c, _ = findnz(Hi)
+            for (i, j) in zip(r, c)
+                if i <= j
+                    push!(inds, (i, j))
+                end
             end
         end
-        return inds
+    elseif !sparse_obj
+        # Performance optimization. If both are dense, no need to repeat
     else
-        num_vars = length(moiproblem.u0)
-        return Tuple{Int,Int}[(row, col) for col in 1:num_vars for row in 1:col]
+        for col in 1:N, row in 1:col
+            push!(inds, (row, col))
+        end
     end
+    return inds
 end

 function MOI.eval_hessian_lagrangian(
@@ -112,43 +134,47 @@ function MOI.eval_hessian_lagrangian(
     σ,
     μ,
 ) where {T}
-    if iszero(σ)
-        fill!(h, zero(T))
-    else
-        moiproblem.f.hess(moiproblem.H, x)
-        k = 0
-        if moiproblem.H isa SparseMatrixCSC
-            rows, cols, _ = findnz(moiproblem.H)
-            for (i, j) in zip(rows, cols)
+    fill!(h, zero(T))
+    k = 0
+    moiproblem.f.hess(moiproblem.H, x)
+    sparse_objective = moiproblem.H isa SparseMatrixCSC
+    if sparse_objective
+        rows, cols, _ = findnz(moiproblem.H)
+        for (i, j) in zip(rows, cols)
+            if i <= j
                 k += 1
                 h[k] = σ * moiproblem.H[i, j]
             end
-        else
-            for i in 1:size(moiproblem.H, 1)
-                for j in 1:i
-                    k += 1
-                    h[k] = σ * moiproblem.H[i, j]
-                end
-            end
+        end
+    else
+        for i in 1:size(moiproblem.H, 1), j in 1:i
+            k += 1
+            h[k] = σ * moiproblem.H[i, j]
         end
     end
+    # A count of the number of non-zeros in the objective Hessian is needed if
+    # the constraints are dense.
+    nnz_objective = k
     if !isempty(μ) && !all(iszero, μ)
         moiproblem.f.cons_h(moiproblem.cons_H, x)
         for (μi, Hi) in zip(μ, moiproblem.cons_H)
-            k = 0
             if Hi isa SparseMatrixCSC
                 rows, cols, _ = findnz(Hi)
                 for (i, j) in zip(rows, cols)
-                    k += 1
-                    h[k] += μi * Hi[i, j]
-                end
-            else
-                for i in 1:size(Hi, 1)
-                    for j in 1:i
+                    if i <= j
                         k += 1
                         h[k] += μi * Hi[i, j]
                     end
                 end
+            else
+                # The constraints are dense. We only store one copy of the
+                # Hessian, so reset `k` to where it starts. That will be
+                # `nnz_objective` if the objective is sparse, and `0` otherwise.
+                k = sparse_objective ? nnz_objective : 0
+                for i in 1:size(Hi, 1), j in 1:i
+                    k += 1
+                    h[k] += μi * Hi[i, j]
+                end
             end
         end
     end
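
To see why the new structure reports only one triangle, here is a small standalone sketch (not part of the diff; the matrices and values below are made up for illustration). It shows the two facts the patch relies on: nonzeros(J) walks the stored entries of a SparseMatrixCSC in the same column-major order as the (row, col) pairs from findnz(J), so copying it into the flat callback vector matches the Jacobian structure reported to MOI; and a symmetric sparse Hessian stores both triangles, so findnz reports each off-diagonal entry twice and the structure is filtered to i <= j.

using SparseArrays

# Sparse Jacobian: findnz and nonzeros agree on order, so copying nonzeros(J)
# into the flat callback vector matches the reported sparsity pattern.
J = sparse([1, 3, 2], [1, 2, 3], [10.0, 30.0, 20.0], 3, 3)
rows, cols, vals = findnz(J)
@assert vals == nonzeros(J)        # same values, same (column-major) order
@show collect(zip(rows, cols))     # the Jacobian sparsity pattern

# Symmetric sparse Hessian: both triangles are stored, so each off-diagonal
# entry appears twice in findnz; keeping i <= j retains a single triangle.
H = sparse([1, 2, 1, 3, 2, 3, 1], [2, 1, 3, 1, 3, 2, 1],
           [4.0, 4.0, 5.0, 5.0, 6.0, 6.0, 7.0], 3, 3)
r, c, _ = findnz(H)
upper = [(i, j) for (i, j) in zip(r, c) if i <= j]
@show upper                        # [(1, 1), (1, 2), (1, 3), (2, 3)]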

lib/OptimizationMOI/test/runtests.jl

Lines changed: 41 additions & 0 deletions
@@ -1,6 +1,34 @@
 using OptimizationMOI, Optimization, Ipopt, NLopt, Zygote, ModelingToolkit
 using Test

+function _test_sparse_derivatives_hs071(backend)
+    function objective(x, ::Any)
+        return x[1] * x[4] * (x[1] + x[2] + x[3]) + x[3]
+    end
+    function constraints(x, ::Any)
+        return [
+            x[1] * x[2] * x[3] * x[4],
+            x[1]^2 + x[2]^2 + x[3]^2 + x[4]^2,
+        ]
+    end
+    prob = OptimizationProblem(
+        OptimizationFunction(objective, backend; cons = constraints),
+        [1.0, 5.0, 5.0, 1.0];
+        sense = Optimization.MinSense,
+        lb = [1.0, 1.0, 1.0, 1.0],
+        ub = [5.0, 5.0, 5.0, 5.0],
+        lcons = [25.0, 40.0],
+        ucons = [Inf, 40.0],
+    )
+    sol = solve(prob, Ipopt.Optimizer())
+    @test isapprox(sol.minimum, 17.014017145179164; atol = 1e-6)
+    x = [1.0, 4.7429996418092970, 3.8211499817883077, 1.3794082897556983]
+    @test isapprox(sol.minimizer, x; atol = 1e-6)
+    @test prod(sol.minimizer) >= 25.0 - 1e-6
+    @test isapprox(sum(sol.minimizer.^2), 40.0; atol = 1e-6)
+    return
+end
+
 @testset "OptimizationMOI.jl" begin
     rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
     x0 = zeros(2)
@@ -41,3 +69,16 @@ using Test
     sol = solve(prob, OptimizationMOI.MOI.OptimizerWithAttributes(Ipopt.Optimizer, "max_cpu_time" => 60.0))
     @test 10 * sol.minimum < l1
 end
+
+@testset "backends" begin
+    for backend in (
+        Optimization.AutoModelingToolkit(false, false),
+        Optimization.AutoModelingToolkit(true, false),
+        Optimization.AutoModelingToolkit(false, true),
+        Optimization.AutoModelingToolkit(true, true),
+    )
+        @testset "$backend" begin
+            _test_sparse_derivatives_hs071(backend)
+        end
+    end
+end
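
The new testset runs the same HS071 model under all four ModelingToolkit backend configurations; as far as I can tell (an assumption, not stated in the diff), the two Boolean flags of Optimization.AutoModelingToolkit toggle sparse objective and sparse constraint derivatives respectively, so the (true, true) case is the one that exercises the sparse Hessian/Jacobian paths fixed above. A hedged sketch of running just that case from a Julia REPL, assuming the test dependencies are installed and the helper _test_sparse_derivatives_hs071 defined above is in scope:

using Test, Optimization, OptimizationMOI, Ipopt, ModelingToolkit

# Run only the fully sparse configuration of the new test helper.
@testset "hs071 sparse backend" begin
    _test_sparse_derivatives_hs071(Optimization.AutoModelingToolkit(true, true))
end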
