
Commit e92555c

Add more NLS tests + test for JuMP only models (#388)
* Add more NLS tests
* test JuMP model
* format and try to fix tests
1 parent 7deb3cb commit e92555c


10 files changed, +78 -53 lines changed


src/ADNLPProblems/bard.jl

Lines changed: 2 additions & 1 deletion
@@ -8,7 +8,8 @@ end
 function bard(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwargs...) where {T}
   y = Rational{Int}[0.14 0.18 0.22 0.25 0.29 0.32 0.35 0.39 0.37 0.58 0.73 0.16 1.34 2.10 4.39]
   function f(x)
-    return 1 // 2 * sum(y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])) for i = 1:15)
+    return 1 // 2 *
+           sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
   end
   x0 = ones(T, 3)
   return ADNLPModels.ADNLPModel(f, x0, name = "bard"; kwargs...)

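Note (illustrative sketch, not part of this commit): squaring each term makes the NLP objective equal to the least-squares value 1/2 ‖F(x)‖² that an ADNLSModel computes from the same residuals. A minimal check with made-up data and a hypothetical residual, assuming ADNLPModels.jl and NLPModels.jl:

    using ADNLPModels, NLPModels

    y = [0.5, 1.0, 2.0]                                   # made-up data, not the bard data
    F(x) = [y[i] - (x[1] + i * x[2]) for i = 1:3]         # residuals r_i(x)
    f(x) = 1 // 2 * sum((y[i] - (x[1] + i * x[2]))^2 for i = 1:3)

    x0 = ones(2)
    nlp = ADNLPModel(f, x0)      # scalar objective, as in bard(::Val{:nlp})
    nls = ADNLSModel(F, x0, 3)   # residual form; obj is 1/2 * ||F(x)||^2

    obj(nlp, x0) ≈ obj(nls, x0)  # true once each residual term is squared
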
src/ADNLPProblems/hs6.jl

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ end

 function hs6(::Val{:nls}; n::Int = default_nvar, type::Type{T} = Float64, kwargs...) where {T}
   function F!(r, x)
-    r[1] = 1 // 2 * (x[1] - 1)^2
+    r[1] = (x[1] - 1)
     return r
   end
   function c!(cx, x)

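Note (illustrative sketch, not part of this commit): ADNLSModel! applies the 1/2 ‖F(x)‖² weighting itself, so the residual should be the plain r₁(x) = x₁ - 1; keeping 1/2 (x₁ - 1)² inside r₁ would turn the objective into 1/8 (x₁ - 1)⁴. A tiny unconstrained sketch of just the hs6 residual, assuming ADNLPModels.jl and NLPModels.jl:

    using ADNLPModels, NLPModels

    F!(r, x) = (r[1] = x[1] - 1; r)         # plain residual: no 1/2, no square
    nls = ADNLSModel!(F!, [-1.2, 1.0], 1)   # the hs6 constraint is omitted here

    x0 = nls.meta.x0
    obj(nls, x0) ≈ 1 / 2 * (x0[1] - 1)^2    # true: the model supplies the 1/2 ||F||^2
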
src/ADNLPProblems/penalty1.jl

Lines changed: 1 addition & 1 deletion
@@ -22,6 +22,6 @@ function penalty1(::Val{:nls}; n::Int = default_nvar, type::Type{T} = Float64, k
     r[n + 1] = sum(x[j]^2 for j = 1:n) - 1 // 4
     return r
   end
-  x0 = ones(T, n)
+  x0 = T[j for j = 1:n]
   return ADNLPModels.ADNLSModel!(F!, x0, n + 1, name = "penalty1-nls"; kwargs...)
 end

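Note (illustrative sketch, not part of this commit): T[j for j = 1:n] is a typed comprehension, i.e. the vector [1, 2, …, n] in the requested precision, which matches the classical penalty1 starting point xⱼ = j rather than all ones. For example:

    T = Float32
    n = 4
    x0 = T[j for j = 1:n]   # Float32[1.0, 2.0, 3.0, 4.0]
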
src/ADNLPProblems/watson.jl

Lines changed: 3 additions & 2 deletions
@@ -20,10 +20,11 @@ function watson(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwa
       sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) -
       sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
     )^2 +
-    1 // 2 * (
+    1 // 2 *
+    (
       sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
       sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
-    )
+    )^2
   end
   x0 = zeros(T, n)
   return ADNLPModels.ADNLPModel(f, x0, name = "watson"; kwargs...)

src/PureJuMP/bard.jl

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ function bard(args...; n::Int = default_nvar, m::Int = 2n, kwargs...)
   @objective(
     nlp,
     Min,
-    0.5 * sum(y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])) for i = 1:15)
+    0.5 * sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
   )

   return nlp

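Note (illustrative sketch, not part of this commit): the same squared-term correction written against a toy JuMP model and wrapped as a MathOptNLPModel, which is how runtests.jl consumes the PureJuMP problems. Assumes JuMP.jl, NLPModelsJuMP.jl, and NLPModels.jl; the data and model below are made up:

    using JuMP, NLPModelsJuMP, NLPModels

    y = [0.5, 1.0, 2.0]                    # made-up data, not the bard data
    model = Model()
    @variable(model, x[1:2], start = 1.0)
    @objective(model, Min, 0.5 * sum((y[i] - (x[1] + i * x[2]))^2 for i = 1:3))

    nlp = MathOptNLPModel(model, name = "toy_bard")
    obj(nlp, nlp.meta.x0)                  # evaluates the squared-sum objective at the start point
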
src/PureJuMP/hs6.jl

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ function hs6(args...; kwargs...)
   x0 = [-1.2, 1]
   @variable(nlp, x[i = 1:2], start = x0[i])

-  @objective(nlp, Min, 0.5 * (1 - x[1])^2)
+  @objective(nlp, Min, 0.5 * (x[1] - 1)^2)

   @constraint(nlp, 10 * (x[2] - x[1]^2) == 0)

src/PureJuMP/watson.jl

Lines changed: 3 additions & 2 deletions
@@ -37,10 +37,11 @@ function watson(args...; n::Int = default_nvar, kwargs...)
     (
       sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) - sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
     )^2 +
-    0.5 * (
+    0.5 *
+    (
       sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
       sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
-    )
+    )^2
   )

   return nlp

test/runtests.jl

Lines changed: 25 additions & 26 deletions
@@ -14,17 +14,22 @@ addprocs(np - 1)
   [n for n in names(mod) if isdefined(mod, n)]
 end

-const list_problems =
+@everywhere const list_problems =
   setdiff(union(defined_names(ADNLPProblems), defined_names(PureJuMP)), [:PureJuMP, :ADNLPProblems])

+@testset "Test that all problems have a meta" begin
+  @test sort(list_problems) == sort(Symbol.(OptimizationProblems.meta[!, :name]))
+end
+
 # The problems included should be carefully argumented and issues
 # to create them added.
 # TODO: tests are limited for JuMP-only problems
-const list_problems_not_ADNLPProblems =
+@everywhere const list_problems_not_ADNLPProblems =
   Symbol[:catmix, :gasoil, :glider, :methanol, :minsurf, :pinene, :rocket, :steering, :torsion]
-const list_problems_ADNLPProblems = setdiff(list_problems, list_problems_not_ADNLPProblems)
-const list_problems_not_PureJuMP = Symbol[]
-const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)
+@everywhere const list_problems_ADNLPProblems =
+  setdiff(list_problems, list_problems_not_ADNLPProblems)
+@everywhere const list_problems_not_PureJuMP = Symbol[]
+@everywhere const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)

 include("test-defined-problems.jl")
 include("test-utils.jl")
@@ -45,7 +50,7 @@ end
     error("Problem $(prob) is not defined in $mod on pid $(myid()).")
   end
   ctor = getfield(mod, prob)
-  return MathOptNLPModel(ctor(; kwargs...))
+  return MathOptNLPModel(ctor(; kwargs...); name = "$prob")
 end

 @everywhere function make_ad_nlp(prob::Symbol; kwargs...)
@@ -57,6 +62,8 @@ end
   return ctor(matrix_free = true; kwargs...)
 end

+include("test-in-place-residual.jl")
+
 @everywhere function test_one_problem(prob::Symbol)
   pb = string(prob)

@@ -75,41 +82,33 @@ end

   nlp_ad = timed_info("Instantiating $(pb)", make_nlp, prob)

-  @test nlp_ad.meta.name == pb
+  @testset "Sanity check (name, obj)" begin
+    @test nlp_ad.meta.name == pb
+    @test !isnothing(obj(nlp_ad, nlp_ad.meta.x0))
+  end

-  if pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name]
+  if (typeof(nlp_ad) <: ADNLPModels.AbstractADNLPModel) &&
+     (pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name])
     @testset "Test In-place Nonlinear Constraints for AD-$prob" begin
       test_in_place_constraints(prob, nlp_ad)
     end
   end

-  @testset "Test multi-precision ADNLPProblems for $prob" begin
-    test_multi_precision(prob, nlp_ad)
-  end
-
-  if pb in meta[meta.objtype .== :least_squares, :name]
-    @testset "Test Nonlinear Least Squares for $prob" begin
-      test_in_place_residual(prob)
+  if typeof(nlp_ad) <: ADNLPModels.AbstractADNLPModel
+    @testset "Test multi-precision ADNLPProblems for $prob" begin
+      test_multi_precision(prob, nlp_ad)
     end
   end

-  model = begin
-    mod = PureJuMP
-    if isdefined(mod, prob)
-      getfield(mod, prob)(n = ndef)
-    else
-      nothing
-    end
-  end
-  if !isnothing(model)
+  if mod in intersect(list_problems_PureJuMP, list_problems_ADNLPProblems)
     @testset "Test problems compatibility for $prob" begin
-      nlp_jump = MathOptNLPModel(model)
+      nlp_jump = make_jump_nlp(prob; n = ndef)
       test_compatibility(prob, nlp_jump, nlp_ad, ndef)
     end
   end
 end

-pmap(test_one_problem, list_problems_ADNLPProblems)
+pmap(test_one_problem, list_problems)

 include("test-scalable.jl")

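Note (illustrative sketch, not part of this commit): the constants are now declared with @everywhere because pmap runs test_one_problem on worker processes, which only see globals defined on every process. A minimal, self-contained illustration with hypothetical names:

    using Distributed
    addprocs(2)

    @everywhere const demo_list = [:alpha, :beta, :gamma]   # hypothetical problem list
    @everywhere in_list(p::Symbol) = p in demo_list          # executed on workers

    pmap(in_list, demo_list)   # [true, true, true]; without @everywhere the workers
                               # would fail with UndefVarError(:demo_list)
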
test/test-in-place-residual.jl

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+@everywhere function test_in_place_residual(prob::Symbol)
+  nlp = make_ad_nlp(prob; use_nls = false)
+  @test typeof(nlp) <: ADNLPModels.ADNLPModel
+  nls = make_ad_nlp(prob; use_nls = true)
+  @test typeof(nls) <: ADNLPModels.ADNLSModel
+  return test_in_place_residual(prob, nlp, nls)
+end
+
+@everywhere function test_in_place_residual(
+  prob::Symbol,
+  nlp::AbstractNLPModel,
+  nls::AbstractNLSModel,
+)
+  @testset "Test in-place residual $prob" begin
+    x = nls.meta.x0
+    Fx = similar(x, nls.nls_meta.nequ)
+    pb = String(prob)
+    if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
+      @allocated residual!(nls, x, Fx)
+      @test (@allocated residual!(nls, x, Fx)) == 0
+    end
+    m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
+    @test nls.nls_meta.nequ == m
+  end
+
+  @testset "Compare NLS with NLP $prob: x0 and obj are the same." begin
+    x0 = nlp.meta.x0
+    @test x0 == nls.meta.x0
+    nlp_fx = obj(nlp, x0)
+    nls_fx = obj(nls, x0)
+    are_almost_same = (nlp_fx ≈ nls_fx) | (nlp_fx ≈ 2 * nls_fx)
+    if !(are_almost_same)
+      @info "$prob : NLS $(nls_fx) ≈ NLP $(nlp_fx)"
+    end
+    @test are_almost_same
+  end
+end
+
+nls_name_list =
+  intersect(Symbol.(meta[meta.objtype .== :least_squares, :name]), list_problems_ADNLPProblems)
+pmap(test_in_place_residual, nls_name_list)

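Note (illustrative sketch, not part of this commit): the allocation check above calls residual! once so the measured second call excludes compilation, and the NLS/NLP comparison accepts nlp_fx ≈ nls_fx or nlp_fx ≈ 2 * nls_fx because some scalar formulations keep the 1/2 factor while the NLS objective is always 1/2 ‖F(x)‖². A self-contained version of both patterns with a hypothetical residual; whether the allocation count is actually zero depends on the model and Julia version, hence the VERSION guard and the palmer/watson exclusions above:

    using ADNLPModels, NLPModels

    F!(r, x) = (r .= x .- 1; r)                    # hypothetical in-place residual
    nls = ADNLSModel!(F!, zeros(3), 3)

    x = nls.meta.x0
    Fx = similar(x, nls.nls_meta.nequ)
    residual!(nls, x, Fx)                          # first call: compilation
    bytes = @allocated residual!(nls, x, Fx)       # second call: the one measured
    # the testset asserts bytes == 0 when the residual is allocation-free

    nlp = ADNLPModel(x -> sum((x .- 1) .^ 2), zeros(3))   # scalar form without the 1/2 factor
    obj(nlp, x) ≈ 2 * obj(nls, x)                          # true: hence the factor-2 branch
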
test/test-utils.jl

Lines changed: 0 additions & 18 deletions
@@ -46,24 +46,6 @@ end
   @test ncon == m
 end

-@everywhere function test_in_place_residual(prob::Symbol)
-  nls = OptimizationProblems.ADNLPProblems.eval(prob)(use_nls = true)
-  @test typeof(nls) <: ADNLPModels.ADNLSModel
-  return test_in_place_residual(prob, nls)
-end
-
-@everywhere function test_in_place_residual(prob::Symbol, nls::AbstractNLSModel)
-  x = nls.meta.x0
-  Fx = similar(x, nls.nls_meta.nequ)
-  pb = String(prob)
-  if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
-    @allocated residual!(nls, x, Fx)
-    @test (@allocated residual!(nls, x, Fx)) == 0
-  end
-  m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
-  @test nls.nls_meta.nequ == m
-end
-
 @everywhere function test_compatibility(prob::Symbol, ndef::Integer = ndef)
   prob_fn = eval(Meta.parse("PureJuMP.$(prob)"))
   model = prob_fn(n = ndef)
