5 changes: 5 additions & 0 deletions test/runtests.jl
@@ -4,6 +4,7 @@ using ProximalOperators
using ADNLPModels,
OptimizationProblems,
OptimizationProblems.ADNLPProblems,
ManualNLPModels,
NLPModels,
NLPModelsModifiers,
RegularizedProblems,
@@ -18,6 +19,10 @@ const global bpdn, bpdn_nls, sol = bpdn_model(compound)
const global bpdn2, bpdn_nls2, sol2 = bpdn_model(compound, bounds = true)
const global λ = norm(grad(bpdn, zeros(bpdn.meta.nvar)), Inf) / 10

include("utils.jl")
include("test-solver.jl")

include("test-R2N.jl")
include("test_AL.jl")

for (mod, mod_name) ∈ ((x -> x, "exact"), (LSR1Model, "lsr1"), (LBFGSModel, "lbfgs"))
83 changes: 83 additions & 0 deletions test/test-R2N.jl
@@ -0,0 +1,83 @@
@testset "R2N" begin
# BASIC TESTS
# Test basic NLP with 2-norm
@testset "BASIC" begin
rosenbrock_nlp = construct_rosenbrock_nlp()
rosenbrock_reg_nlp = RegularizedNLPModel(rosenbrock_nlp, NormL2(0.01))

# Test first order status
first_order_kwargs = (atol = 1e-6, rtol = 1e-6)
test_solver(
rosenbrock_reg_nlp,
"R2N",
expected_status = :first_order,
solver_kwargs = first_order_kwargs,
)
solver, stats = R2NSolver(rosenbrock_reg_nlp), RegularizedExecutionStats(rosenbrock_reg_nlp)

# Test max time status
max_time_kwargs = (x0 = [π, -π], atol = 1e-16, rtol = 1e-16, max_time = 1e-12)
test_solver(
rosenbrock_reg_nlp,
"R2N",
expected_status = :max_time,
solver_kwargs = max_time_kwargs,
)

# Test max iter status
max_iter_kwargs = (x0 = [π, -π], atol = 1e-16, rtol = 1e-16, max_iter = 1)
test_solver(
rosenbrock_reg_nlp,
"R2N",
expected_status = :max_iter,
solver_kwargs = max_iter_kwargs,
)

# Test max eval status
max_eval_kwargs = (x0 = [π, -π], atol = 1e-16, rtol = 1e-16, max_eval = 1)
test_solver(
rosenbrock_reg_nlp,
"R2N",
expected_status = :max_eval,
solver_kwargs = max_eval_kwargs,
)
end
# BPDN TESTS

# Test bpdn with L-BFGS and 1-norm
@testset "BPDN" begin
bpdn_kwargs = (x0 = zeros(bpdn.meta.nvar), σk = 1.0, β = 1e16, atol = 1e-6, rtol = 1e-6)
reg_nlp = RegularizedNLPModel(LBFGSModel(bpdn), NormL1(λ))
test_solver(reg_nlp, "R2N", expected_status = :first_order, solver_kwargs = bpdn_kwargs)
solver, stats = R2NSolver(reg_nlp), RegularizedExecutionStats(reg_nlp)
@test @wrappedallocs(
solve!(solver, reg_nlp, stats, σk = 1.0, β = 1e16, atol = 1e-6, rtol = 1e-6)
) == 0

#test_solver(reg_nlp, # FIXME
# "R2N",
# expected_status = :first_order,
# solver_kwargs=bpdn_kwargs,
# solver_constructor_kwargs=(subsolver=R2DHSolver,))

# Test bpdn with L-SR1 and 0-norm
reg_nlp = RegularizedNLPModel(LSR1Model(bpdn), NormL0(λ))
test_solver(reg_nlp, "R2N", expected_status = :first_order, solver_kwargs = bpdn_kwargs)
solver, stats = R2NSolver(reg_nlp), RegularizedExecutionStats(reg_nlp)
@test @wrappedallocs(
solve!(solver, reg_nlp, stats, σk = 1.0, β = 1e16, atol = 1e-6, rtol = 1e-6)
) == 0

test_solver(
reg_nlp,
"R2N",
expected_status = :first_order,
solver_kwargs = bpdn_kwargs,
solver_constructor_kwargs = (subsolver = R2DHSolver,),
)
solver, stats = R2NSolver(reg_nlp, subsolver = R2DHSolver), RegularizedExecutionStats(reg_nlp)
@test @wrappedallocs(
solve!(solver, reg_nlp, stats, σk = 1.0, β = 1e16, atol = 1e-6, rtol = 1e-6)
) == 0
end
end
47 changes: 47 additions & 0 deletions test/test-solver.jl
@@ -0,0 +1,47 @@
function test_solver(
reg_nlp::R,
solver_name::String;
expected_status = :first_order,
solver_constructor_kwargs = (;),
solver_kwargs = (;),
) where {R}

# Test output with allocating calling form
solver_fun = getfield(RegularizedOptimization, Symbol(solver_name))
stats_basic = solver_fun(
reg_nlp.model,
reg_nlp.h,
ROSolverOptions();
solver_constructor_kwargs...,
solver_kwargs...,
)

x0 = get(solver_kwargs, :x0, reg_nlp.model.meta.x0)
@test typeof(stats_basic.solution) == typeof(x0)
@test length(stats_basic.solution) == reg_nlp.model.meta.nvar
@test typeof(stats_basic.dual_feas) == eltype(stats_basic.solution)
@test stats_basic.status == expected_status
@test obj(reg_nlp, stats_basic.solution) == stats_basic.objective
@test stats_basic.objective <= obj(reg_nlp, x0)

# Test output with optimized calling form
solver_constructor = getfield(RegularizedOptimization, Symbol(solver_name * "Solver"))
solver = solver_constructor(reg_nlp; solver_constructor_kwargs...)
stats_optimized = RegularizedExecutionStats(reg_nlp)

# Remove the x0 entry from solver_kwargs with Base.structdiff (see the sketch after this function)
optimized_solver_kwargs = Base.structdiff(solver_kwargs, NamedTuple{(:x0,)})
# It would be interesting to check allocations here as well, but depending on the
# structure of solver_kwargs some variables may get boxed, producing false
# positives: e.g. with tol = 1e-3 and solver_kwargs = (atol = tol,),
# @wrappedallocs would report a nonzero count.
solve!(solver, reg_nlp, stats_optimized; x = x0, optimized_solver_kwargs...)
@test typeof(stats_optimized.solution) == typeof(x0)
@test length(stats_optimized.solution) == reg_nlp.model.meta.nvar
@test typeof(stats_optimized.dual_feas) == eltype(stats_optimized.solution)
@test stats_optimized.status == expected_status
@test obj(reg_nlp, stats_optimized.solution) == stats_optimized.objective
@test stats_optimized.objective <= obj(reg_nlp, x0)

# TODO: test that the optimized entries in stats_optimized and stats_basic are the same.

end
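
A side note on the `Base.structdiff` call used above to strip `x0`: it returns the fields of its first argument whose names do not appear in the second. A minimal sketch of the behaviour relied on here (values are illustrative only):

```julia
# Illustrative values; this mirrors how test_solver drops :x0 before calling solve!.
kw = (x0 = zeros(2), atol = 1e-6, rtol = 1e-6)
Base.structdiff(kw, NamedTuple{(:x0,)})  # -> (atol = 1.0e-6, rtol = 1.0e-6)
```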
41 changes: 0 additions & 41 deletions test/test_allocs.jl
@@ -1,44 +1,3 @@
"""
@wrappedallocs(expr)

Given an expression, this macro wraps that expression inside a new function
which will evaluate that expression and measure the amount of memory allocated
by the expression. Wrapping the expression in a new function allows for more
accurate memory allocation detection when using global variables (e.g. when
at the REPL).

This code is based on that of https://github.com/JuliaAlgebra/TypedPolynomials.jl/blob/master/test/runtests.jl

For example, `@wrappedallocs(x + y)` produces:

```julia
function g(x1, x2)
@allocated x1 + x2
end
g(x, y)
```

You can use this macro in a unit test to verify that a function does not
allocate:

```
@test @wrappedallocs(x + y) == 0
```
"""
macro wrappedallocs(expr)
kwargs = [a for a in expr.args if isa(a, Expr)]
args = [a for a in expr.args if isa(a, Symbol)]

argnames = [gensym() for a in args]
kwargs_dict = Dict{Symbol, Any}(a.args[1] => a.args[2] for a in kwargs if a.head == :kw)
quote
function g($(argnames...); kwargs_dict...)
$(Expr(expr.head, argnames..., kwargs...)) # Call the function twice to make the allocated macro more stable
@allocated $(Expr(expr.head, argnames..., kwargs...))
end
$(Expr(:call, :g, [esc(a) for a in args]...))
end
end

# Test non allocating solve!
@testset "NLP allocs" begin
52 changes: 52 additions & 0 deletions test/utils.jl
@@ -0,0 +1,52 @@
"""
@wrappedallocs(expr)

Given an expression, this macro wraps that expression inside a new function
which will evaluate that expression and measure the amount of memory allocated
by the expression. Wrapping the expression in a new function allows for more
accurate memory allocation detection when using global variables (e.g. when
at the REPL).

This code is based on that of https://github.com/JuliaAlgebra/TypedPolynomials.jl/blob/master/test/runtests.jl

You can use this macro in a unit test to verify that a function does not
allocate:

```
@test @wrappedallocs(x + y) == 0
```
"""
macro wrappedallocs(expr)
kwargs = [a for a in expr.args if isa(a, Expr)]
args = [a for a in expr.args if isa(a, Symbol)]

argnames = [gensym() for a in args]
kwargs_dict = Dict{Symbol, Any}(a.args[1] => a.args[2] for a in kwargs if a.head == :kw)
quote
function g($(argnames...); kwargs_dict...)
$(Expr(expr.head, argnames..., kwargs...)) # Evaluate the expression once first so that compilation is not counted by @allocated
@allocated $(Expr(expr.head, argnames..., kwargs...))
end
$(Expr(:call, :g, [esc(a) for a in args]...))
end
end

# Construct the Rosenbrock problem as a ManualNLPModels model.

function rosenbrock_f(x::Vector{T}) where {T <: Real}
100 * (x[2] - x[1]^2)^2 + (1 - x[1])^2
end

function rosenbrock_grad!(gx::Vector{T}, x::Vector{T}) where {T <: Real}
gx[1] = -400 * x[1] * (x[2] - x[1]^2) - 2 * (1 - x[1])
gx[2] = 200 * (x[2] - x[1]^2)
end

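# Hessian of the Rosenbrock objective:
#   [ 1200*x[1]^2 - 400*x[2] + 2   -400*x[1] ]
#   [ -400*x[1]                     200      ]
# obj_weight is accepted for API compatibility but assumed to be 1 in these tests.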
function rosenbrock_hv!(hv::Vector{T}, x::Vector{T}, v::Vector{T}; obj_weight = 1.0) where {T}
hv[1] = (1200 * x[1]^2 - 400 * x[2] + 2) * v[1] - 400 * x[1] * v[2]
hv[2] = -400 * x[1] * v[1] + 200 * v[2]
end

function construct_rosenbrock_nlp()
return NLPModel(zeros(2), rosenbrock_f, grad = rosenbrock_grad!, hprod = rosenbrock_hv!)
end
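
For reference, here is a version of the expansion example that was dropped from the docstring when the macro moved here from `test_allocs.jl`, updated to reflect the warm-up evaluation. It is a simplified sketch of what `@wrappedallocs(x + y)` does, not the literal macro output (in the real expansion the called function is also passed in as an argument, and any keyword arguments found in the expression are spliced back into the call):

```julia
function g(x1, x2)
  x1 + x2              # evaluate once so compilation is not counted
  @allocated x1 + x2   # measure allocations of the second evaluation
end
g(x, y)
```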