27 changes: 18 additions & 9 deletions src/lbfgsb.jl
@@ -8,9 +8,9 @@ It is a quasi-Newton optimization algorithm that supports bounds.

References

- R. H. Byrd, P. Lu and J. Nocedal. A Limited Memory Algorithm for Bound Constrained Optimization, (1995), SIAM Journal on Scientific and Statistical Computing , 16, 5, pp. 1190-1208.
- C. Zhu, R. H. Byrd and J. Nocedal. L-BFGS-B: Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (1997), ACM Transactions on Mathematical Software, Vol 23, Num. 4, pp. 550 - 560.
- J.L. Morales and J. Nocedal. L-BFGS-B: Remark on Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (2011), to appear in ACM Transactions on Mathematical Software.
- R. H. Byrd, P. Lu and J. Nocedal. A Limited Memory Algorithm for Bound Constrained Optimization, (1995), SIAM Journal on Scientific and Statistical Computing , 16, 5, pp. 1190-1208.
- C. Zhu, R. H. Byrd and J. Nocedal. L-BFGS-B: Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (1997), ACM Transactions on Mathematical Software, Vol 23, Num. 4, pp. 550 - 560.
- J.L. Morales and J. Nocedal. L-BFGS-B: Remark on Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (2011), to appear in ACM Transactions on Mathematical Software.
"""
@kwdef struct LBFGS
m::Int = 10
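Aside (not part of the diff): `LBFGS` is a `@kwdef` struct, so its limited-memory history length can be set by keyword when constructing the solver. A minimal sketch, assuming the solver is reached as `Optimization.LBFGS` as in the new test further down; `m` is the only field visible in this hunk, so any other fields are omitted:

```julia
using Optimization

alg_default = Optimization.LBFGS()        # uses the default history length m = 10
alg_wide    = Optimization.LBFGS(m = 20)  # keeps a longer curvature history
```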
@@ -92,6 +92,9 @@ function SciMLBase.__solve(cache::OptimizationCache{
C
}
maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
if isnothing(maxiters)
maxiters = 1000 # Default value for constrained problems
end

local x

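For context, the fallback added above only applies when the caller omits `maxiters`; an explicitly supplied value still passes through `_check_and_convert_maxiters` unchanged. A hedged sketch of the user-facing calls, mirroring the constrained problem added to `test/native.jl` in this PR (the `using` line is an assumption about the surrounding environment):

```julia
using Optimization, ForwardDiff

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
function cons!(out, x, p)
    out[1] = sum(x)
    return nothing
end

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(), cons = cons!)
prob = OptimizationProblem(optf, [-1.0, 1.0], [1.0, 100.0], lcons = [0.0], ucons = [0.0])

# Omitting maxiters previously errored for constrained problems (issue #958);
# with this change it falls back to 1000 iterations.
sol_default = solve(prob, Optimization.LBFGS())

# An explicit maxiters still takes precedence over the fallback.
sol_long = solve(prob, Optimization.LBFGS(), maxiters = 5000)
```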
@@ -124,7 +127,8 @@ function SciMLBase.__solve(cache::OptimizationCache{
cache.f.cons(cons_tmp, θ)
cons_tmp[eq_inds] .= cons_tmp[eq_inds] - cache.lcons[eq_inds]
cons_tmp[ineq_inds] .= cons_tmp[ineq_inds] .- cache.ucons[ineq_inds]
opt_state = Optimization.OptimizationState(u = θ, objective = x[1], p = cache.p, iter = iter_count[])
opt_state = Optimization.OptimizationState(
u = θ, objective = x[1], p = cache.p, iter = iter_count[])
if cache.callback(opt_state, x...)
error("Optimization halted by callback.")
end
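The callback invoked here now receives an `OptimizationState` that carries the running iteration counter alongside the objective; returning `true` from the user callback aborts the solve through the `error` call above. A minimal user-side sketch (the callback name and the commented `solve` call are illustrative, assuming a problem like the one in the new test):

```julia
# Hypothetical logging callback: `state` is the OptimizationState built in this diff.
function logging_callback(state, args...)
    println("iter $(state.iter): objective = $(state.objective)")
    return false  # return true to halt with "Optimization halted by callback."
end

# sol = solve(prob, Optimization.LBFGS(), callback = logging_callback)
```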
@@ -166,10 +170,12 @@ function SciMLBase.__solve(cache::OptimizationCache{
n = length(cache.u0)

if cache.lb === nothing
optimizer, bounds = LBFGSB._opt_bounds(
optimizer,
bounds = LBFGSB._opt_bounds(
n, cache.opt.m, [-Inf for i in 1:n], [Inf for i in 1:n])
else
optimizer, bounds = LBFGSB._opt_bounds(
optimizer,
bounds = LBFGSB._opt_bounds(
n, cache.opt.m, solver_kwargs.lb, solver_kwargs.ub)
end

@@ -212,7 +218,8 @@ function SciMLBase.__solve(cache::OptimizationCache{
_loss = function (θ)
x = cache.f(θ, cache.p)
iter_count[] += 1
opt_state = Optimization.OptimizationState(u = θ, objective = x[1], p = cache.p, iter = iter_count[])
opt_state = Optimization.OptimizationState(
u = θ, objective = x[1], p = cache.p, iter = iter_count[])
if cache.callback(opt_state, x...)
error("Optimization halted by callback.")
end
@@ -222,10 +229,12 @@ function SciMLBase.__solve(cache::OptimizationCache{
n = length(cache.u0)

if cache.lb === nothing
optimizer, bounds = LBFGSB._opt_bounds(
optimizer,
bounds = LBFGSB._opt_bounds(
n, cache.opt.m, [-Inf for i in 1:n], [Inf for i in 1:n])
else
optimizer, bounds = LBFGSB._opt_bounds(
optimizer,
bounds = LBFGSB._opt_bounds(
n, cache.opt.m, solver_kwargs.lb, solver_kwargs.ub)
end

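For reference, the `_opt_bounds` branches reformatted above are where the problem's box constraints are forwarded to the Fortran L-BFGS-B routine, with `[-Inf, Inf]` substituted when no bounds are given. A small usage sketch under the same assumptions as the examples above (the specific bounds are illustrative):

```julia
using Optimization, ForwardDiff

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())

# Box-constrained: lb/ub end up in LBFGSB._opt_bounds via the else branch.
prob_box = OptimizationProblem(optf, [0.0, 0.0], [1.0, 100.0];
    lb = [-1.0, -1.0], ub = [0.8, 0.8])
sol_box = solve(prob_box, Optimization.LBFGS(), maxiters = 100)

# Unbounded: the lb === nothing branch substitutes infinite bounds.
prob_free = OptimizationProblem(optf, [0.0, 0.0], [1.0, 100.0])
sol_free = solve(prob_free, Optimization.LBFGS(), maxiters = 100)
```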
26 changes: 26 additions & 0 deletions test/native.jl
@@ -61,3 +61,29 @@ optf1 = OptimizationFunction(loss, AutoSparseForwardDiff())
prob1 = OptimizationProblem(optf1, rand(5), data)
sol1 = solve(prob1, OptimizationOptimisers.Adam(), maxiters = 1000, callback = callback)
@test sol1.objective < l0

# Test for issue #958: LBFGS with constraints and no maxiters specified
rosenbrock_issue958(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
p_issue958 = [1.0, 100.0]
function cons_issue958!(out, x, p)
out[1] = sum(x)
end

optf_issue958 = OptimizationFunction(
rosenbrock_issue958, AutoForwardDiff(), cons = cons_issue958!)
prob_issue958 = OptimizationProblem(
optf_issue958, [-1, 1.0], p_issue958, lcons = [0.0], ucons = [0.0])

# This should not throw an error (issue #958)
sol_issue958 = solve(prob_issue958, Optimization.LBFGS())
# The key test is that it doesn't throw an error about maxiters being nothing
# It may return MaxIters if it doesn't converge in the default 1000 iterations
@test sol_issue958.retcode in [
Optimization.SciMLBase.ReturnCode.Success, Optimization.SciMLBase.ReturnCode.MaxIters]

# If it did converge, verify the constraint is satisfied
if sol_issue958.retcode == Optimization.SciMLBase.ReturnCode.Success
cons_result_issue958 = zeros(1)
cons_issue958!(cons_result_issue958, sol_issue958.u, p_issue958)
@test abs(cons_result_issue958[1]) < 1e-6
end