Merged. Changes from all commits.
1 change: 1 addition & 0 deletions lib/OptimizationAuglag/src/OptimizationAuglag.jl
@@ -17,6 +17,7 @@ using LinearAlgebra: norm
end

SciMLBase.has_init(::AugLag) = true
+SciMLBase.allowscallback(::AugLag) = true
SciMLBase.allowsbounds(::AugLag) = true
SciMLBase.requiresgradient(::AugLag) = true
SciMLBase.allowsconstraints(::AugLag) = true
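Throughout this PR the pattern is the same: a solver package opts in to callback support by defining the `SciMLBase.allowscallback` trait on its optimizer type, which OptimizationBase's `_check_opt_alg` consults during `solve` (see the solve.jl hunk below). A minimal sketch of the pattern, using a hypothetical solver type:

```julia
using SciMLBase

struct MySolver end  # hypothetical optimizer type

# Declare that `solve(prob, MySolver(); callback = ...)` is supported.
# Without this trait method, the compatibility check in OptimizationBase
# rejects any user-supplied callback for this solver.
SciMLBase.allowscallback(::MySolver) = true
```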
2 changes: 1 addition & 1 deletion lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -9,7 +9,7 @@ abstract type BBO end

SciMLBase.requiresbounds(::BBO) = true
SciMLBase.allowsbounds(::BBO) = true

+SciMLBase.allowscallback(opt::BBO) = true
SciMLBase.has_init(opt::BBO) = true

for j in string.(BlackBoxOptim.SingleObjectiveMethodNames)
2 changes: 1 addition & 1 deletion lib/OptimizationBase/src/solve.jl
@@ -123,7 +123,7 @@ function _check_opt_alg(prob::SciMLBase.OptimizationProblem, alg; kwargs...)
(isnothing(prob.lcons) || isnothing(prob.ucons)) &&
throw(ArgumentError("Constrained optimization problem requires both `lcons` and `ucons` to be provided to OptimizationProblem. " *
"Example: OptimizationProblem(optf, u0, p; lcons=[-Inf], ucons=[0.0])"))
-    !allowscallback(alg) && haskey(kwargs, :callback) &&
+    !allowscallback(alg) && !(get(kwargs, :callback, DEFAULT_CALLBACK) isa NullCallback) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) does not support callbacks, remove the `callback` keyword argument from the `solve` call."))
requiresgradient(alg) &&
!(prob.f isa SciMLBase.AbstractOptimizationFunction) &&
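The check above changed because `solve` can now always carry a `callback` keyword that defaults to a no-op `NullCallback`; testing for the key's mere presence would then reject solvers that only received the default. A minimal sketch of the difference, with simplified stand-ins for the OptimizationBase internals:

```julia
# Simplified stand-ins, not the actual OptimizationBase definitions:
struct NullCallback end
const DEFAULT_CALLBACK = NullCallback()

allowscallback(alg) = false  # a solver without callback support

# Old check: fires even when only the no-op default callback is passed through.
old_check(alg; kwargs...) = !allowscallback(alg) && haskey(kwargs, :callback)

# New check: fires only for a genuine user-supplied callback.
new_check(alg; kwargs...) = !allowscallback(alg) &&
    !(get(kwargs, :callback, DEFAULT_CALLBACK) isa NullCallback)

old_check(nothing; callback = DEFAULT_CALLBACK)     # true  -> spurious error
new_check(nothing; callback = DEFAULT_CALLBACK)     # false -> passes
new_check(nothing; callback = (state, l) -> false)  # true  -> correctly rejected
```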
lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -9,6 +9,7 @@ export CMAEvolutionStrategyOpt

struct CMAEvolutionStrategyOpt end

+SciMLBase.allowscallback(opt::CMAEvolutionStrategyOpt) = true
SciMLBase.allowsbounds(::CMAEvolutionStrategyOpt) = true
SciMLBase.has_init(opt::CMAEvolutionStrategyOpt) = true
SciMLBase.requiresgradient(::CMAEvolutionStrategyOpt) = false
lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
@@ -4,6 +4,7 @@ using Reexport
@reexport using Evolutionary, OptimizationBase
using SciMLBase

+SciMLBase.allowscallback(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.allowsbounds(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.allowsconstraints(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.has_init(opt::Evolutionary.AbstractOptimizer) = true
8 changes: 2 additions & 6 deletions lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
@@ -48,12 +48,8 @@ function __map_optimizer_args(cache::OptimizationBase.OptimizationCache, opt::GC
end

function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
-    opt::GCMAESOpt; σ0 = 0.2,
-    callback = (args...) -> (false),
-    progress = false, kwargs...)
-    return OptimizationCache(prob, opt; σ0 = σ0, callback = callback,
-        progress = progress,
-        kwargs...)
+    opt::GCMAESOpt; σ0 = 0.2, kwargs...)
+    return OptimizationCache(prob, opt; σ0 = σ0, kwargs...)
end

function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: GCMAESOpt}
5 changes: 4 additions & 1 deletion lib/OptimizationIpopt/src/OptimizationIpopt.jl
@@ -170,6 +170,8 @@ function SciMLBase.has_init(alg::IpoptOptimizer)
true
end

+SciMLBase.allowscallback(alg::IpoptOptimizer) = true
+
# Compatibility with OptimizationBase@v3
function SciMLBase.supports_opt_cache_interface(alg::IpoptOptimizer)
true
@@ -254,7 +256,8 @@ function __map_optimizer_args(cache,
)

# Set up progress callback
-    progress_callback = IpoptProgressLogger(progress, callback, prob, cache.n, cache.num_cons, maxiters, cache.iterations)
+    progress_callback = IpoptProgressLogger(
+        progress, callback, prob, cache.n, cache.num_cons, maxiters, cache.iterations)
intermediate = (args...) -> progress_callback(args...)
Ipopt.SetIntermediateCallback(prob, intermediate)

1 change: 1 addition & 0 deletions lib/OptimizationLBFGSB/src/OptimizationLBFGSB.jl
@@ -31,6 +31,7 @@ References
ϵ = 1e-8
end

+SciMLBase.allowscallback(::LBFGSB) = true
SciMLBase.has_init(::LBFGSB) = true
SciMLBase.allowsbounds(::LBFGSB) = true
SciMLBase.requiresgradient(::LBFGSB) = true
5 changes: 5 additions & 0 deletions lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -288,6 +288,11 @@ function SciMLBase.has_init(alg::Union{MOI.AbstractOptimizer,
true
end

+function SciMLBase.allowscallback(alg::Union{MOI.AbstractOptimizer,
+        MOI.OptimizerWithAttributes})
+    true
+end
+
# Compatibility with OptimizationBase@v3
function SciMLBase.supports_opt_cache_interface(alg::Union{MOI.AbstractOptimizer,
MOI.OptimizerWithAttributes})
3 changes: 2 additions & 1 deletion lib/OptimizationMadNLP/Project.toml
@@ -30,6 +30,7 @@ julia = "1.10"
OptimizationBase = {path = "../OptimizationBase"}

[extras]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
@@ -41,4 +42,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[targets]
-test = ["Aqua", "DifferentiationInterface", "ForwardDiff", "ModelingToolkit", "Random", "ReverseDiff", "Test", "Symbolics", "Zygote"]
+test = ["ADTypes", "Aqua", "DifferentiationInterface", "ForwardDiff", "ModelingToolkit", "Random", "ReverseDiff", "Test", "Symbolics", "Zygote"]
13 changes: 11 additions & 2 deletions lib/OptimizationMadNLP/src/OptimizationMadNLP.jl
@@ -231,6 +231,10 @@ end
hessian_constant::Bool = false
hessian_approximation::Type = MadNLP.ExactHessian

+    # Initialization Options
+    nlp_scaling::Bool = true
+    nlp_scaling_max_gradient::Float64 = 100.0
+
# Linear solver configuration
linear_solver::Union{Nothing, Type} = nothing # e.g., MumpsSolver, LapackCPUSolver, UmfpackSolver

@@ -247,6 +251,8 @@ end

SciMLBase.has_init(opt::MadNLPOptimizer) = true

+SciMLBase.allowscallback(opt::MadNLPOptimizer) = false
+
function SciMLBase.requiresgradient(opt::MadNLPOptimizer)
true
end
@@ -343,7 +349,7 @@ function __map_optimizer_args(cache,
nvar = length(cache.u0)
ncon = !isnothing(cache.lcons) ? length(cache.lcons) : 0

-    if callback !== DEFAULT_CALLBACK || progress
+    if !(callback isa OptimizationBase.NullCallback) || progress
@warn("MadNLP doesn't currently support user defined callbacks.")
end
# TODO: add support for user callbacks in MadNLP
@@ -374,7 +380,7 @@ function __map_optimizer_args(cache,
print_level = verbose
end

-    !isnothing(reltol) && @warn "reltol not supported by MadNLP."
+    !isnothing(reltol) && @warn "reltol not supported by MadNLP, use abstol instead."
tol = isnothing(abstol) ? 1e-8 : abstol
max_iter = isnothing(maxiters) ? 3000 : maxiters
max_wall_time = isnothing(maxtime) ? 1e6 : maxtime
@@ -411,6 +417,8 @@ function __map_optimizer_args(cache,
options[:jacobian_constant] = opt.jacobian_constant
options[:hessian_constant] = opt.hessian_constant
options[:hessian_approximation] = opt.hessian_approximation
+    options[:nlp_scaling] = opt.nlp_scaling
+    options[:nlp_scaling_max_gradient] = opt.nlp_scaling_max_gradient
options[:print_level] = print_level
options[:tol] = tol
options[:max_iter] = max_iter
Expand All @@ -422,6 +430,7 @@ end
function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: MadNLPOptimizer}
maxiters = OptimizationBase._check_and_convert_maxiters(cache.solver_args.maxiters)
maxtime = OptimizationBase._check_and_convert_maxtime(cache.solver_args.maxtime)
+    maxtime = maxtime isa Float32 ? convert(Float64, maxtime) : maxtime

meta, options = __map_optimizer_args(cache,
cache.opt;
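Taken together, the MadNLP changes surface the solver's NLP scaling as user-facing options, warn on (rather than silently ignore) a user callback since MadNLP declares `allowscallback = false`, point users to `abstol` in the `reltol` warning, and promote a `Float32` `maxtime` because MadNLP's `max_wall_time` option is typed `Float64`. A usage sketch, assuming the new keywords forward directly to MadNLP's `nlp_scaling` and `nlp_scaling_max_gradient` options as the diff indicates:

```julia
using OptimizationMadNLP

# Keep automatic scaling but lower the gradient cap used to compute it.
opt = MadNLPOptimizer(nlp_scaling = true, nlp_scaling_max_gradient = 50.0)

# Or turn NLP scaling off entirely.
opt_noscale = MadNLPOptimizer(nlp_scaling = false)
```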
14 changes: 8 additions & 6 deletions lib/OptimizationMadNLP/test/runtests.jl
@@ -2,6 +2,7 @@ using OptimizationMadNLP
using OptimizationBase
using MadNLP
using Test
+using ADTypes
import Zygote, ForwardDiff, ReverseDiff
using SparseArrays
using DifferentiationInterface: SecondOrder
@@ -497,7 +498,7 @@ end
@test min_dist > 0.5 # Electrons should be well-separated
end

-@testset verbose = true "LBFGS vs Exact Hessian" begin
+@testset verbose=true "LBFGS vs Exact Hessian" begin
# Test with moderate size to show LBFGS efficiency
np = 10 # Gyroelongated square dipyramid configuration
x0 = init_electrons_on_sphere(np)
@@ -525,11 +526,12 @@ end
)

sol = solve(prob, opt; abstol = 1e-6, maxiters = 300, verbose = false)
-        push!(results, name => (
-            objective = sol.objective,
-            iterations = sol.stats.iterations,
-            success = SciMLBase.successful_retcode(sol)
-        ))
+        push!(results,
+            name => (
+                objective = sol.objective,
+                iterations = sol.stats.iterations,
+                success = SciMLBase.successful_retcode(sol)
+            ))
end

# All methods should converge
1 change: 1 addition & 0 deletions lib/OptimizationManopt/src/OptimizationManopt.jl
@@ -13,6 +13,7 @@ internal state.
abstract type AbstractManoptOptimizer end

SciMLBase.has_init(opt::AbstractManoptOptimizer) = true
+SciMLBase.allowscallback(opt::AbstractManoptOptimizer) = true

function __map_optimizer_args!(cache::OptimizationBase.OptimizationCache,
opt::AbstractManoptOptimizer;
1 change: 1 addition & 0 deletions lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -9,6 +9,7 @@ using OptimizationBase: deduce_retcode

SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
SciMLBase.has_init(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
+SciMLBase.allowscallback(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true

function SciMLBase.requiresgradient(opt::Union{NLopt.Algorithm, NLopt.Opt})
# https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16
7 changes: 6 additions & 1 deletion lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
@@ -16,6 +16,10 @@ SciMLBase.has_init(opt::Optim.AbstractOptimizer) = true
SciMLBase.has_init(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
SciMLBase.has_init(opt::Optim.ConstrainedOptimizer) = true

+SciMLBase.allowscallback(opt::Optim.AbstractOptimizer) = true
+SciMLBase.allowscallback(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
+SciMLBase.allowscallback(opt::Optim.ConstrainedOptimizer) = true
+
function SciMLBase.requiresgradient(opt::Optim.AbstractOptimizer)
!(opt isa Optim.ZerothOrderOptimizer)
end
@@ -39,8 +43,9 @@ function __map_optimizer_args(cache::OptimizationBase.OptimizationCache,
maxtime::Union{Number, Nothing} = nothing,
abstol::Union{Number, Nothing} = nothing,
reltol::Union{Number, Nothing} = nothing,
+        verbose = false,
kwargs...)
-    mapped_args = (; extended_trace = true, kwargs...)
+    mapped_args = (; extended_trace = true, show_trace = verbose, kwargs...)

if !isnothing(abstol)
mapped_args = (; mapped_args..., f_abstol = abstol)
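With `verbose` now threaded through `__map_optimizer_args` to Optim.jl's `show_trace`, enabling the iteration trace is a one-keyword change at the `solve` call. A usage sketch (the problem setup here is hypothetical):

```julia
using OptimizationBase, OptimizationOptimJL
using ADTypes, ForwardDiff

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])

# `verbose = true` is forwarded to Optim.jl as `show_trace = true`.
sol = solve(prob, BFGS(); verbose = true)
```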
4 changes: 2 additions & 2 deletions lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -7,6 +7,7 @@ using SciMLBase
SciMLBase.has_init(opt::AbstractRule) = true
SciMLBase.requiresgradient(opt::AbstractRule) = true
SciMLBase.allowsfg(opt::AbstractRule) = true
+SciMLBase.allowscallback(opt::AbstractRule) = true

function SciMLBase.__init(
prob::SciMLBase.OptimizationProblem, opt::AbstractRule;
@@ -67,7 +68,6 @@ function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: AbstractRule
breakall = false
progress_id = :OptimizationOptimizersJL
for epoch in 1:epochs, d in data
-
if cache.f.fg !== nothing && dataiterate
x = cache.f.fg(G, θ, d)
iterations += 1
@@ -107,7 +107,7 @@ function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: AbstractRule
if cache.progress
message = "Loss: $(round(first(first(x)); digits = 3))"
@logmsg(LogLevel(-1), "Optimization", _id=progress_id,
-            message=message, progress=iterations/maxiters)
+            message=message, progress=iterations / maxiters)
end
if cache.solver_args.save_best
if first(x)[1] < first(min_err)[1] #found a better solution
1 change: 1 addition & 0 deletions lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -14,6 +14,7 @@ struct COBYLA <: PRIMASolvers end
export UOBYQA, NEWUOA, BOBYQA, LINCOA, COBYLA

SciMLBase.has_init(::PRIMASolvers) = true
+SciMLBase.allowscallback(::PRIMASolvers) = true
SciMLBase.allowsconstraints(::Union{LINCOA, COBYLA}) = true
SciMLBase.allowsbounds(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
SciMLBase.requiresconstraints(opt::COBYLA) = true
lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
@@ -6,6 +6,7 @@ using SciMLBase, OptimizationOptimJL, OptimizationOptimisers

struct PolyOpt end

+SciMLBase.allowscallback(::PolyOpt) = SciMLBase.allowscallback(Optimisers.Adam()) &&
+                                      SciMLBase.allowscallback(OptimizationOptimJL.BFGS())
SciMLBase.requiresgradient(opt::PolyOpt) = true

function SciMLBase.__solve(prob::OptimizationProblem,
3 changes: 3 additions & 0 deletions lib/OptimizationSciPy/src/OptimizationSciPy.jl
@@ -216,17 +216,20 @@ for opt_type in [:ScipyMinimize, :ScipyDifferentialEvolution, :ScipyBasinhopping
:ScipyLinprog, :ScipyMilp]
@eval begin
SciMLBase.allowsbounds(::$opt_type) = true
+        SciMLBase.allowscallback(::$opt_type) = true
SciMLBase.has_init(::$opt_type) = true
end
end

for opt_type in [:ScipyMinimizeScalar, :ScipyRootScalar, :ScipyLeastSquares]
@eval begin
SciMLBase.has_init(::$opt_type) = true
+        SciMLBase.allowscallback(::$opt_type) = true
end
end

SciMLBase.has_init(::ScipyRoot) = true
+SciMLBase.allowscallback(::ScipyRoot) = true

function SciMLBase.requiresgradient(opt::ScipyMinimize)
gradient_free = ["Nelder-Mead", "Powell", "COBYLA", "COBYQA"]
1 change: 1 addition & 0 deletions lib/OptimizationSophia/src/OptimizationSophia.jl
@@ -62,6 +62,7 @@ struct Sophia
end

SciMLBase.has_init(opt::Sophia) = true
+SciMLBase.allowscallback(opt::Sophia) = true
SciMLBase.requiresgradient(opt::Sophia) = true
SciMLBase.allowsfg(opt::Sophia) = true
SciMLBase.requireshessian(opt::Sophia) = true