
Commit 9cd8ece

fix
1 parent 60a580e commit 9cd8ece

4 files changed (+29, -48 lines changed)

test/runtests.jl

Lines changed: 4 additions & 35 deletions
@@ -28,16 +28,13 @@ const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP
 include("test-defined-problems.jl")
 @everywhere include("test-utils.jl")
 
-@test ndef == OptimizationProblems.PureJuMP.default_nvar
-@test ndef == OptimizationProblems.ADNLPProblems.default_nvar
-
-@everywhere function make_ad_nlp(prob::Symbol; simp_backend="")
+@everywhere function make_ad_nlp(prob::Symbol; kwargs...)
   mod = ADNLPProblems
   if !isdefined(mod, prob)
     error("Problem $(prob) is not defined in ADNLPProblems on pid $(myid()).")
   end
   ctor = getfield(mod, prob)
-  return isempty(simp_backend) ? ctor() : ctor(eval(Meta.parse(simp_backend)))
+  return ctor(matrix_free = true; kwargs...)
 end
 
 @everywhere function test_one_problem(prob::Symbol)
@@ -46,12 +43,7 @@ end
   nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
   ncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))()
 
-  nlp_ad = if (nvar + ncon < 10000)
-    make_ad_nlp(prob)
-  else
-    # Avoid SparseADJacobian for too large problem as it requires a lot of memory for CIs
-    make_ad_nlp(prob, simp_backend = simp_backend)
-  end
+  nlp_ad = make_ad_nlp(prob)
 
   @test nlp_ad.meta.name == pb
 
@@ -103,29 +95,6 @@ end
 
 pmap(test_one_problem, list_problems_ADNLPProblems)
 
-names_pb_vars = meta[
-  meta.variable_nvar .== true,
-  [:nvar, :name, :best_known_upper_bound, :best_known_lower_bound],
-]
-adproblems = (
-  eval(Meta.parse("ADNLPProblems.$(pb[:name])(" * simp_backend * ")")) for
-  pb in eachrow(names_pb_vars)
-)
-adproblems11 = (
-  eval(Meta.parse("ADNLPProblems.$(pb[:name])(n=$(13 * ndef), " * simp_backend * ")")) for
-  pb in eachrow(names_pb_vars)
-)
-
-@testset "Test scalable problems" begin
-  @testset "problem: $pb" for (pb, nlp, nlp11) in
-                              zip(eachrow(names_pb_vars), adproblems, adproblems11)
-    @test pb[:nvar] == nlp.meta.nvar
-    n11 = OptimizationProblems.eval(Symbol(:get_, pb[:name], :_nvar))(n = 13 * ndef)
-    @test n11 == nlp11.meta.nvar
-
-    # test that the problem is actually scalable
-    @test n11 != pb[:nvar]
-  end
-end
+include("test-scalable.jl")
 
 rmprocs()
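With this change, `make_ad_nlp` becomes the single entry point for building AD problems on every worker: it constructs each problem with `matrix_free = true` and forwards any extra keyword arguments, so the sparse Jacobian/Hessian backends that the removed comment flags as too memory-hungry for CI are no longer built, whatever the problem size. A minimal sketch of what such a call amounts to, assuming the usual keyword-forwarding ADNLPProblems constructors and picking `woods` purely as an illustrative problem name:

    # hypothetical stand-alone example, not part of the test suite
    using ADNLPModels, OptimizationProblems
    nlp = OptimizationProblems.ADNLPProblems.woods(; matrix_free = true)               # default-size, matrix-free model
    nlp_big = OptimizationProblems.ADNLPProblems.woods(; matrix_free = true, n = 1300) # resized via the forwarded kwarg

Keywords such as `n` pass straight through to the problem constructor, which is what the new `test-scalable.jl` below relies on.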

test/test-defined-problems.jl

Lines changed: 0 additions & 4 deletions
@@ -8,17 +8,13 @@
   return (pid = myid(), missing = missing)
 end
 
-const list_problems_ADNLPProblems =
-  setdiff(list_problems, list_problems_not_ADNLPProblems)
 probes = @sync begin
   for pid in workers()
     @async remotecall_fetch(probe_missing, pid, ADNLPProblems, list_problems_ADNLPProblems)
   end
 end
 @info "ADNLPProblems missing per worker" probes
 
-const list_problems_PureJuMP =
-  setdiff(list_problems, list_problems_not_PureJuMP)
 probes = @sync begin
   for pid in workers()
     @async remotecall_fetch(probe_missing, pid, PureJuMP, list_problems_PureJuMP)

test/test-scalable.jl

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
+@everywhere function test_scalable(pb)
+  @testset "problem: $(pb[:name])" begin
+    nlp = make_ad_nlp(Symbol(pb[:name]))
+    @test pb[:nvar] == nlp.meta.nvar
+    nlp11 = make_ad_nlp(Symbol(pb[:name]); n = 13 * ndef)
+    n11 = OptimizationProblems.eval(Symbol(:get_, pb[:name], :_nvar))(n = 13 * ndef)
+    @test n11 == nlp11.meta.nvar
+
+    # test that the problem is actually scalable
+    @test n11 != pb[:nvar]
+  end
+end
+
+@testset "Test scalable problems" begin
+  names_pb_vars = meta[
+    meta.variable_nvar .== true,
+    [:nvar, :name, :best_known_upper_bound, :best_known_lower_bound],
+  ]
+  pmap(test_scalable, eachrow(names_pb_vars))
+end
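The scalable-problem checks now live in their own file and are dispatched to the workers with `pmap`, reusing the `make_ad_nlp`, `ndef`, and `meta` definitions made `@everywhere` in the other test files. A hypothetical single-row walk-through of what each `pmap` task does (the row selection and variable names here are illustrative only):

    # take one scalable problem from the meta table and check that n actually scales
    row   = first(eachrow(meta[meta.variable_nvar .== true, [:nvar, :name]]))
    nlp   = make_ad_nlp(Symbol(row[:name]))                 # default-size instance
    nlp13 = make_ad_nlp(Symbol(row[:name]); n = 13 * ndef)  # resized instance
    nlp13.meta.nvar != nlp.meta.nvar                         # holds for a genuinely scalable problem

Compared with the previous generator-plus-`zip` loop in `runtests.jl`, each scalable problem is now built and tested on a worker instead of sequentially on the main process.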

test/test-utils.jl

Lines changed: 5 additions & 9 deletions
@@ -1,10 +1,11 @@
 @everywhere const ndef = OptimizationProblems.default_nvar
+
+@test ndef == OptimizationProblems.PureJuMP.default_nvar
+@test ndef == OptimizationProblems.ADNLPProblems.default_nvar
+
 @everywhere const test_nvar = Int(round(ndef / 2))
 @everywhere meta = OptimizationProblems.meta
 
-# Avoid SparseADJacobian/Hessian for too large problem as it requires a lot of memory for CIs
-@everywhere const simp_backend = "jacobian_backend = ADNLPModels.ForwardDiffADJacobian, hessian_backend = ADNLPModels.ForwardDiffADHessian"
-
 @everywhere function meta_sanity_check(prob::Symbol, nlp::AbstractNLPModel)
   meta = OptimizationProblems.eval(Symbol(prob, :_meta))
   getnvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))(n = test_nvar)
@@ -71,12 +72,7 @@ end
   nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
   ncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))()
 
-  nlp_ad = if (nvar + ncon < 10000)
-    eval(Meta.parse("ADNLPProblems.$(prob)()"))
-  else
-    # Avoid SparseADJacobian for too large problem as it requires a lot of memory for CIs
-    eval(Meta.parse("ADNLPProblems.$(prob)(" * simp_backend * ")"))
-  end
+  nlp_ad = make_ad_nlp(prob)
 
   return test_compatibility(prob, nlp_jump, nlp_ad, ndef)
 end
