3 changes: 2 additions & 1 deletion src/ADNLPProblems/bard.jl
@@ -8,7 +8,8 @@ end
function bard(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwargs...) where {T}
y = Rational{Int}[0.14 0.18 0.22 0.25 0.29 0.32 0.35 0.39 0.37 0.58 0.73 0.16 1.34 2.10 4.39]
function f(x)
return 1 // 2 * sum(y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])) for i = 1:15)
return 1 // 2 *
sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
end
x0 = ones(T, 3)
return ADNLPModels.ADNLPModel(f, x0, name = "bard"; kwargs...)
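For context, a minimal sketch (not part of the diff) of the corrected Bard objective and the residual form it corresponds to; `bard_objective` and `bard_residual!` are hypothetical names, and `y` stands for the 15-element data vector defined in `bard.jl`:

```julia
# Sketch only: the fixed objective is the classic least-squares form,
# 1/2 * the sum of squared residuals over the 15 data points.
function bard_objective(x, y)
  return 1 / 2 *
         sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
end

# Equivalent in-place residual, as an NLS formulation would store it.
function bard_residual!(r, x, y)
  for i = 1:15
    r[i] = y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3]))
  end
  return r
end
```

With this split, `bard_objective(x, y) ≈ 0.5 * sum(abs2, bard_residual!(similar(y), x, y))`, which is the relation the new NLS/NLP consistency test below relies on.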
2 changes: 1 addition & 1 deletion src/ADNLPProblems/hs6.jl
@@ -23,7 +23,7 @@ end

function hs6(::Val{:nls}; n::Int = default_nvar, type::Type{T} = Float64, kwargs...) where {T}
function F!(r, x)
r[1] = 1 // 2 * (x[1] - 1)^2
r[1] = (x[1] - 1)
return r
end
function c!(cx, x)
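The motivation for the residual change above is the NLS convention that a model's objective is half the squared norm of the residual, so `r[1]` must hold the plain residual rather than its halved square. A minimal sketch, assuming ADNLPModels.jl and NLPModels.jl are available (the hs6 constraint is omitted):

```julia
using ADNLPModels, NLPModels

# Unconstrained sketch of the hs6 residual: r(x) = x[1] - 1.
F!(r, x) = (r[1] = x[1] - 1; r)
nls = ADNLPModels.ADNLSModel!(F!, [-1.2, 1.0], 1, name = "hs6-sketch")

x = nls.meta.x0
obj(nls, x) ≈ 1 / 2 * (x[1] - 1)^2   # obj returns 1/2 * ||F(x)||^2
```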
2 changes: 1 addition & 1 deletion src/ADNLPProblems/penalty1.jl
@@ -22,6 +22,6 @@ function penalty1(::Val{:nls}; n::Int = default_nvar, type::Type{T} = Float64, k
r[n + 1] = sum(x[j]^2 for j = 1:n) - 1 // 4
return r
end
x0 = ones(T, n)
x0 = T[j for j = 1:n]
return ADNLPModels.ADNLSModel!(F!, x0, n + 1, name = "penalty1-nls"; kwargs...)
end
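A quick illustration (arbitrary values of `T` and `n`) of the new starting point: `T[j for j = 1:n]` builds the vector (1, 2, …, n) in the requested element type, the classical Penalty I starting point, replacing the previous all-ones start.

```julia
# Sketch only, with arbitrary T and n:
T, n = Float32, 4
x0 = T[j for j = 1:n]   # Float32[1.0, 2.0, 3.0, 4.0]
```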
5 changes: 3 additions & 2 deletions src/ADNLPProblems/watson.jl
@@ -20,10 +20,11 @@ function watson(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwa
sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) -
sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
)^2 +
1 // 2 * (
1 // 2 *
(
sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
)
)^2
end
x0 = zeros(T, n)
return ADNLPModels.ADNLPModel(f, x0, name = "watson"; kwargs...)
2 changes: 1 addition & 1 deletion src/PureJuMP/bard.jl
@@ -23,7 +23,7 @@ function bard(args...; n::Int = default_nvar, m::Int = 2n, kwargs...)
@objective(
nlp,
Min,
0.5 * sum(y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])) for i = 1:15)
0.5 * sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
)

return nlp
2 changes: 1 addition & 1 deletion src/PureJuMP/hs6.jl
@@ -19,7 +19,7 @@ function hs6(args...; kwargs...)
x0 = [-1.2, 1]
@variable(nlp, x[i = 1:2], start = x0[i])

@objective(nlp, Min, 0.5 * (1 - x[1])^2)
@objective(nlp, Min, 0.5 * (x[1] - 1)^2)

@constraint(nlp, 10 * (x[2] - x[1]^2) == 0)

5 changes: 3 additions & 2 deletions src/PureJuMP/watson.jl
@@ -37,10 +37,11 @@ function watson(args...; n::Int = default_nvar, kwargs...)
(
sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) - sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
)^2 +
0.5 * (
0.5 *
(
sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
)
)^2
)

return nlp
51 changes: 25 additions & 26 deletions test/runtests.jl
@@ -14,17 +14,22 @@ addprocs(np - 1)
[n for n in names(mod) if isdefined(mod, n)]
end

const list_problems =
@everywhere const list_problems =
setdiff(union(defined_names(ADNLPProblems), defined_names(PureJuMP)), [:PureJuMP, :ADNLPProblems])

@testset "Test that all problems have a meta" begin
@test sort(list_problems) == sort(Symbol.(OptimizationProblems.meta[!, :name]))
end

# The problems included should be carefully argumented and issues
# to create them added.
# TODO: tests are limited for JuMP-only problems
const list_problems_not_ADNLPProblems =
@everywhere const list_problems_not_ADNLPProblems =
Symbol[:catmix, :gasoil, :glider, :methanol, :minsurf, :pinene, :rocket, :steering, :torsion]
const list_problems_ADNLPProblems = setdiff(list_problems, list_problems_not_ADNLPProblems)
const list_problems_not_PureJuMP = Symbol[]
const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)
@everywhere const list_problems_ADNLPProblems =
setdiff(list_problems, list_problems_not_ADNLPProblems)
@everywhere const list_problems_not_PureJuMP = Symbol[]
@everywhere const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)

include("test-defined-problems.jl")
include("test-utils.jl")
@@ -45,7 +50,7 @@ end
error("Problem $(prob) is not defined in $mod on pid $(myid()).")
end
ctor = getfield(mod, prob)
return MathOptNLPModel(ctor(; kwargs...))
return MathOptNLPModel(ctor(; kwargs...); name = "$prob")
end

@everywhere function make_ad_nlp(prob::Symbol; kwargs...)
@@ -57,6 +62,8 @@ end
return ctor(matrix_free = true; kwargs...)
end

include("test-in-place-residual.jl")

@everywhere function test_one_problem(prob::Symbol)
pb = string(prob)

@@ -75,41 +82,33 @@ end

nlp_ad = timed_info("Instantiating $(pb)", make_nlp, prob)

@test nlp_ad.meta.name == pb
@testset "Sanity check (name, obj)" begin
@test nlp_ad.meta.name == pb
@test !isnothing(obj(nlp_ad, nlp_ad.meta.x0))
end

if pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name]
if (typeof(nlp_ad) <: ADNLPModels.AbstractADNLPModel) &&
(pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name])
@testset "Test In-place Nonlinear Constraints for AD-$prob" begin
test_in_place_constraints(prob, nlp_ad)
end
end

@testset "Test multi-precision ADNLPProblems for $prob" begin
test_multi_precision(prob, nlp_ad)
end

if pb in meta[meta.objtype .== :least_squares, :name]
@testset "Test Nonlinear Least Squares for $prob" begin
test_in_place_residual(prob)
if typeof(nlp_ad) <: ADNLPModels.AbstractADNLPModel
@testset "Test multi-precision ADNLPProblems for $prob" begin
test_multi_precision(prob, nlp_ad)
end
end

model = begin
mod = PureJuMP
if isdefined(mod, prob)
getfield(mod, prob)(n = ndef)
else
nothing
end
end
if !isnothing(model)
if mod in intersect(list_problems_PureJuMP, list_problems_ADNLPProblems)
@testset "Test problems compatibility for $prob" begin
nlp_jump = MathOptNLPModel(model)
nlp_jump = make_jump_nlp(prob; n = ndef)
test_compatibility(prob, nlp_jump, nlp_ad, ndef)
end
end
end

pmap(test_one_problem, list_problems_ADNLPProblems)
pmap(test_one_problem, list_problems)

include("test-scalable.jl")

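The `@everywhere` prefixes added throughout runtests.jl follow the usual Distributed.jl rule: anything referenced by a function executed through `pmap` must be defined on every worker process, not just the main one. A minimal sketch of the same idea, using hypothetical names:

```julia
using Distributed
addprocs(2)

# Both the constant and the helper must exist on all workers,
# otherwise pmap fails with an UndefVarError on the remote side.
@everywhere const problem_list = [:alpha, :beta, :gamma]
@everywhere check(p) = p in problem_list

pmap(check, problem_list)   # => [true, true, true]
```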
41 changes: 41 additions & 0 deletions test/test-in-place-residual.jl
@@ -0,0 +1,41 @@
@everywhere function test_in_place_residual(prob::Symbol)
nlp = make_ad_nlp(prob; use_nls = false)
@test typeof(nlp) <: ADNLPModels.ADNLPModel
nls = make_ad_nlp(prob; use_nls = true)
@test typeof(nls) <: ADNLPModels.ADNLSModel
return test_in_place_residual(prob, nlp, nls)
end

@everywhere function test_in_place_residual(
prob::Symbol,
nlp::AbstractNLPModel,
nls::AbstractNLSModel,
)
@testset "Test in-place residual $prob" begin
x = nls.meta.x0
Fx = similar(x, nls.nls_meta.nequ)
pb = String(prob)
if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
@allocated residual!(nls, x, Fx)
@test (@allocated residual!(nls, x, Fx)) == 0
end
m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
@test nls.nls_meta.nequ == m
end

@testset "Compare NLS with NLP $prob: x0 and obj are the same." begin
x0 = nlp.meta.x0
@test x0 == nls.meta.x0
nlp_fx = obj(nlp, x0)
nls_fx = obj(nls, x0)
are_almost_same = (nlp_fx ≈ nls_fx) | (nlp_fx ≈ 2 * nls_fx)
if !(are_almost_same)
@info "$prob : NLS $(nls_fx) ≈ NLP $(nlp_fx)"
end
@test are_almost_same
end
end

nls_name_list =
intersect(Symbol.(meta[meta.objtype .== :least_squares, :name]), list_problems_ADNLPProblems)
pmap(test_in_place_residual, nls_name_list)
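The allocation check in test-in-place-residual.jl uses a warm-up call before asserting zero allocations, since the first measurement can include compilation. A generic sketch of the same pattern, with a stand-in `f!` instead of `residual!`:

```julia
using Test

f!(out, x) = (out .= 2 .* x; out)   # stand-in for an in-place residual

function check_no_alloc()
  x, out = rand(3), zeros(3)
  @allocated f!(out, x)               # warm-up: discard the first measurement
  @test (@allocated f!(out, x)) == 0  # steady-state call must not allocate
end

check_no_alloc()
```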
18 changes: 0 additions & 18 deletions test/test-utils.jl
@@ -46,24 +46,6 @@ end
@test ncon == m
end

@everywhere function test_in_place_residual(prob::Symbol)
nls = OptimizationProblems.ADNLPProblems.eval(prob)(use_nls = true)
@test typeof(nls) <: ADNLPModels.ADNLSModel
return test_in_place_residual(prob, nls)
end

@everywhere function test_in_place_residual(prob::Symbol, nls::AbstractNLSModel)
x = nls.meta.x0
Fx = similar(x, nls.nls_meta.nequ)
pb = String(prob)
if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
@allocated residual!(nls, x, Fx)
@test (@allocated residual!(nls, x, Fx)) == 0
end
m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
@test nls.nls_meta.nequ == m
end

@everywhere function test_compatibility(prob::Symbol, ndef::Integer = ndef)
prob_fn = eval(Meta.parse("PureJuMP.$(prob)"))
model = prob_fn(n = ndef)