Skip to content

Commit 5f88adb

Browse files
committed
Refactor tests 1
1 parent 5fa096d commit 5f88adb

File tree

4 files changed

+82
-27
lines changed

4 files changed

+82
-27
lines changed

Project.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,10 @@ julia = "1.6"
2121

2222
[extras]
2323
ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a"
24+
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
2425
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
2526
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
2627
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
2728

2829
[targets]
29-
test = ["ADNLPModels", "NLPModels", "NLPModelsJuMP", "Test"]
30+
test = ["ADNLPModels", "Distributed", "NLPModels", "NLPModelsJuMP", "Test"]

script_get_modified.jl

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
using OptimizationProblems, Random
2+
3+
Random.seed!(1234)
4+
n_lim = 200
5+
6+
names_pb_vars = OptimizationProblems.meta[
7+
OptimizationProblems.meta.variable_nvar .== true,
8+
[:name]
9+
]
10+
list_of_changing_variables = String[]
11+
for prob in eachrow(names_pb_vars)
12+
for i=1:n_lim
13+
n_calc = OptimizationProblems.eval(Symbol(:get_, prob[:name], :_nvar))(n = i)
14+
if n_calc != i
15+
push!(list_of_changing_variables, prob[:name])
16+
break
17+
end
18+
end
19+
end
20+
21+
22+
#=
23+
julia> print(list_of_changing_variables)
24+
["NZF1", "bearing", "broydn7d", "catenary", "chain", "chainwoo", "channel", "clnlbeam", "clplatea", "clplateb", "clplatec", "controlinvestment", "dixmaane", "dixmaanf", "dixmaang", "dixmaanh", "dixmaani", "dixmaanj", "dixmaank", "dixmaanl", "dixmaanm", "dixmaann", "dixmaano", "dixmaanp", "elec", "hovercraft1d", "marine", "polygon", "polygon1", "polygon2", "polygon3", "powellsg", "robotarm", "spmsrtls", "structural", "woods"]
25+
=#
26+
27+
nlp = OptimizationProblems.ADNLPProblems.bt1()
28+
function check_obj_allocs(; nlp = nlp)
29+
x = zeros(2)
30+
obj(nlp, x)
31+
return nothing
32+
end
33+
@allocated check_obj_allocs()
34+
@allocated check_obj_allocs()

test/runtests.jl

Lines changed: 33 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,36 @@
1-
using NLPModels, NLPModelsJuMP, OptimizationProblems, Test
1+
using Distributed
2+
3+
np = Sys.CPU_THREADS
4+
addprocs(np - 1)
5+
6+
@everywhere using NLPModels, NLPModelsJuMP, OptimizationProblems, Test
27

38
@test names(ADNLPProblems) == [:ADNLPProblems]
49

5-
import ADNLPModels
10+
@everywhere import ADNLPModels
611

712
const list_problems = intersect(names(ADNLPProblems), names(PureJuMP))
8-
# all problems have a JuMP and ADNLPModels formulations
9-
@test setdiff(union(names(ADNLPProblems), names(PureJuMP)), list_problems) ==
10-
[:ADNLPProblems, :PureJuMP]
13+
14+
# The problems included here should be carefully justified, and issues
15+
# should be opened to add them later.
16+
# TODO: tests are limited for JuMP-only problems
17+
const list_problems_not_ADNLPProblems = Symbol[]
18+
const list_problems_not_PureJuMP = Symbol[]
19+
20+
const list_problems_ADNLPProblems = setdiff(list_problems, list_problems_not_ADNLPProblems)
21+
const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)
22+
23+
@test setdiff(union(names(ADNLPProblems), list_problems_not_ADNLPProblems), list_problems) ==
24+
[:ADNLPProblems]
25+
@test setdiff(union(names(PureJuMP), list_problems_not_PureJuMP), list_problems) ==
26+
[:PureJuMP]
1127

1228
include("test_utils.jl")
1329

1430
@test ndef == OptimizationProblems.PureJuMP.default_nvar
1531
@test ndef == OptimizationProblems.ADNLPProblems.default_nvar
1632

17-
@testset "problem: $prob" for prob in list_problems
33+
@everywhere function test_one_problem(prob::Symbol)
1834
pb = string(prob)
1935

2036
nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
@@ -52,14 +68,18 @@ include("test_utils.jl")
5268
end
5369
end
5470

55-
@testset "Test problems compatibility for $prob" begin
56-
prob_fn = eval(Meta.parse("PureJuMP.$(prob)"))
57-
model = prob_fn(n = ndef)
58-
nlp_jump = MathOptNLPModel(model)
59-
test_compatibility(prob, nlp_jump, nlp_ad, ndef)
71+
if prob in list_problems_PureJuMP
72+
@testset "Test problems compatibility for $prob" begin
73+
prob_fn = eval(Meta.parse("PureJuMP.$(prob)"))
74+
model = prob_fn(n = ndef)
75+
nlp_jump = MathOptNLPModel(model)
76+
test_compatibility(prob, nlp_jump, nlp_ad, ndef)
77+
end
6078
end
6179
end
6280

81+
pmap(test_one_problem, list_problems_ADNLPProblems[1:10])
82+
6383
names_pb_vars = meta[
6484
meta.variable_nvar .== true,
6585
[:nvar, :name, :best_known_upper_bound, :best_known_lower_bound],
@@ -84,3 +104,5 @@ adproblems11 = (
84104
@test n11 != pb[:nvar]
85105
end
86106
end
107+
108+
rmprocs()

test/test_utils.jl

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,11 @@
1-
ndef = OptimizationProblems.default_nvar
2-
test_nvar = Int(round(ndef / 2))
1+
const ndef = OptimizationProblems.default_nvar
2+
const test_nvar = Int(round(ndef / 2))
33
meta = OptimizationProblems.meta
44

55
# Avoid SparseADJacobian/Hessian for too large problem as it requires a lot of memory for CIs
6-
simp_backend = "jacobian_backend = ADNLPModels.ForwardDiffADJacobian, hessian_backend = ADNLPModels.ForwardDiffADHessian"
6+
const simp_backend = "jacobian_backend = ADNLPModels.ForwardDiffADJacobian, hessian_backend = ADNLPModels.ForwardDiffADHessian"
77

8-
# list of functions used in unit tests
9-
10-
function meta_sanity_check(prob::Symbol, nlp::AbstractNLPModel)
8+
@everywhere function meta_sanity_check(prob::Symbol, nlp::AbstractNLPModel)
119
meta = OptimizationProblems.eval(Symbol(prob, :_meta))
1210
getnvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))(n = test_nvar)
1311
@test getnvar == meta[:nvar] || meta[:variable_nvar]
@@ -29,12 +27,12 @@ function meta_sanity_check(prob::Symbol, nlp::AbstractNLPModel)
2927
@test meta[:has_fixed_variables] == (get_ifix(nlp) != [])
3028
end
3129

32-
function test_in_place_constraints(prob::Symbol)
30+
@everywhere function test_in_place_constraints(prob::Symbol)
3331
nlp = OptimizationProblems.ADNLPProblems.eval(prob)()
3432
return test_in_place_constraints(prob, nlp)
3533
end
3634

37-
function test_in_place_constraints(prob::Symbol, nlp::AbstractNLPModel)
35+
@everywhere function test_in_place_constraints(prob::Symbol, nlp::AbstractNLPModel)
3836
x = get_x0(nlp)
3937
ncon = nlp.meta.nnln
4038
@test ncon > 0
@@ -47,13 +45,13 @@ function test_in_place_constraints(prob::Symbol, nlp::AbstractNLPModel)
4745
@test ncon == m
4846
end
4947

50-
function test_in_place_residual(prob::Symbol)
48+
@everywhere function test_in_place_residual(prob::Symbol)
5149
nls = OptimizationProblems.ADNLPProblems.eval(prob)(use_nls = true)
5250
@test typeof(nls) <: ADNLPModels.ADNLSModel
5351
return test_in_place_residual(prob, nls)
5452
end
5553

56-
function test_in_place_residual(prob::Symbol, nls::AbstractNLSModel)
54+
@everywhere function test_in_place_residual(prob::Symbol, nls::AbstractNLSModel)
5755
x = nls.meta.x0
5856
Fx = similar(x, nls.nls_meta.nequ)
5957
pb = String(prob)
@@ -65,7 +63,7 @@ function test_in_place_residual(prob::Symbol, nls::AbstractNLSModel)
6563
@test nls.nls_meta.nequ == m
6664
end
6765

68-
function test_compatibility(prob::Symbol, ndef::Integer = ndef)
66+
@everywhere function test_compatibility(prob::Symbol, ndef::Integer = ndef)
6967
prob_fn = eval(Meta.parse("PureJuMP.$(prob)"))
7068
model = prob_fn(n = ndef)
7169
nlp_jump = MathOptNLPModel(model)
@@ -83,7 +81,7 @@ function test_compatibility(prob::Symbol, ndef::Integer = ndef)
8381
return test_compatibility(prob, nlp_jump, nlp_ad, ndef)
8482
end
8583

86-
function test_compatibility(
84+
@everywhere function test_compatibility(
8785
prob::Symbol,
8886
nlp_jump,
8987
nlp_ad::ADNLPModels.ADModel,
@@ -126,7 +124,7 @@ function test_compatibility(
126124
meta_sanity_check(prob, nlp_ad)
127125
end
128126

129-
function test_multi_precision(
127+
@everywhere function test_multi_precision(
130128
prob::Symbol,
131129
nlp_ad::ADNLPModels.ADNLPModel{T};
132130
list_types = [Float32, Float64],
@@ -135,7 +133,7 @@ function test_multi_precision(
135133
test_multi_precision(prob, list_types = setdiff(list_types, [T]))
136134
end
137135

138-
function test_multi_precision(prob::Symbol; list_types = [Float32, Float64])
136+
@everywhere function test_multi_precision(prob::Symbol; list_types = [Float32, Float64])
139137
nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
140138
ncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))()
141139

@@ -149,7 +147,7 @@ function test_multi_precision(prob::Symbol; list_types = [Float32, Float64])
149147
end
150148
end
151149

152-
function test_multi_precision(::Type{T}, nlp::AbstractNLPModel) where {T}
150+
@everywhere function test_multi_precision(::Type{T}, nlp::AbstractNLPModel) where {T}
153151
x0 = get_x0(nlp)
154152
@test eltype(x0) == T
155153
@test typeof(obj(nlp, x0)) == T

0 commit comments

Comments
 (0)