diff --git a/lib/OptimizationAuglag/src/OptimizationAuglag.jl b/lib/OptimizationAuglag/src/OptimizationAuglag.jl
index 3ce0df17d..37c6a1a80 100644
--- a/lib/OptimizationAuglag/src/OptimizationAuglag.jl
+++ b/lib/OptimizationAuglag/src/OptimizationAuglag.jl
@@ -17,6 +17,7 @@ using LinearAlgebra: norm
 end
 
 SciMLBase.has_init(::AugLag) = true
+SciMLBase.allowscallback(::AugLag) = true
 SciMLBase.allowsbounds(::AugLag) = true
 SciMLBase.requiresgradient(::AugLag) = true
 SciMLBase.allowsconstraints(::AugLag) = true
diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl
index 59e507f26..ed244ab9d 100644
--- a/lib/OptimizationBBO/src/OptimizationBBO.jl
+++ b/lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -9,7 +9,7 @@ abstract type BBO end
 
 SciMLBase.requiresbounds(::BBO) = true
 SciMLBase.allowsbounds(::BBO) = true
-
+SciMLBase.allowscallback(opt::BBO) = true
 SciMLBase.has_init(opt::BBO) = true
 
 for j in string.(BlackBoxOptim.SingleObjectiveMethodNames)
diff --git a/lib/OptimizationBase/src/solve.jl b/lib/OptimizationBase/src/solve.jl
index cd1a80607..777ca1d8f 100644
--- a/lib/OptimizationBase/src/solve.jl
+++ b/lib/OptimizationBase/src/solve.jl
@@ -123,7 +123,7 @@ function _check_opt_alg(prob::SciMLBase.OptimizationProblem, alg; kwargs...)
         (isnothing(prob.lcons) || isnothing(prob.ucons)) &&
         throw(ArgumentError("Constrained optimization problem requires both `lcons` and `ucons` to be provided to OptimizationProblem. " *
                             "Example: OptimizationProblem(optf, u0, p; lcons=[-Inf], ucons=[0.0])"))
-    !allowscallback(alg) && haskey(kwargs, :callback) &&
+    !allowscallback(alg) && !(get(kwargs, :callback, DEFAULT_CALLBACK) isa NullCallback) &&
         throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) does not support callbacks, remove the `callback` keyword argument from the `solve` call."))
     requiresgradient(alg) && !(prob.f isa SciMLBase.AbstractOptimizationFunction) &&
diff --git a/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl b/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
index 9d372569b..ea1b58284 100644
--- a/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
+++ b/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -9,6 +9,7 @@ export CMAEvolutionStrategyOpt
 
 struct CMAEvolutionStrategyOpt end
 
+SciMLBase.allowscallback(opt::CMAEvolutionStrategyOpt) = true
 SciMLBase.allowsbounds(::CMAEvolutionStrategyOpt) = true
 SciMLBase.has_init(opt::CMAEvolutionStrategyOpt) = true
 SciMLBase.requiresgradient(::CMAEvolutionStrategyOpt) = false
diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
index 6e4395e91..db5a5e46a 100644
--- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
+++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
@@ -4,6 +4,7 @@ using Reexport
 @reexport using Evolutionary, OptimizationBase
 using SciMLBase
 
+SciMLBase.allowscallback(opt::Evolutionary.AbstractOptimizer) = true
 SciMLBase.allowsbounds(opt::Evolutionary.AbstractOptimizer) = true
 SciMLBase.allowsconstraints(opt::Evolutionary.AbstractOptimizer) = true
 SciMLBase.has_init(opt::Evolutionary.AbstractOptimizer) = true
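The `solve.jl` change above is behavioral, not just cosmetic: the old guard rejected any `callback` keyword, even the do-nothing default, while the new one inspects the value and only rejects a real callback. A minimal sketch of the distinction, assuming `DEFAULT_CALLBACK` is a `NullCallback` instance as in OptimizationBase (`NoCallbackOpt` here is a hypothetical solver type, not part of this PR):

```julia
using SciMLBase

# Hypothetical solver that opts out of callbacks, for illustration only.
struct NoCallbackOpt end
SciMLBase.allowscallback(::NoCallbackOpt) = false

# Old check: `haskey(kwargs, :callback)` tripped on the mere presence of the keyword.
#   solve(prob, NoCallbackOpt(); callback = OptimizationBase.NullCallback())  # used to throw
# New check: only a genuine (non-Null) user callback is rejected.
#   solve(prob, NoCallbackOpt(); callback = (state, loss) -> false)           # still throws
```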
diff --git a/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl b/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
index 84b0602be..8282cc306 100644
--- a/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
+++ b/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
@@ -48,12 +48,8 @@ function __map_optimizer_args(cache::OptimizationBase.OptimizationCache, opt::GCMAESOpt;
 end
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
-        opt::GCMAESOpt; σ0 = 0.2,
-        callback = (args...) -> (false),
-        progress = false, kwargs...)
-    return OptimizationCache(prob, opt; σ0 = σ0, callback = callback,
-        progress = progress,
-        kwargs...)
+        opt::GCMAESOpt; σ0 = 0.2, kwargs...)
+    return OptimizationCache(prob, opt; σ0 = σ0, kwargs...)
 end
 
 function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: GCMAESOpt}
diff --git a/lib/OptimizationIpopt/src/OptimizationIpopt.jl b/lib/OptimizationIpopt/src/OptimizationIpopt.jl
index 719d41c08..6d4dbb648 100644
--- a/lib/OptimizationIpopt/src/OptimizationIpopt.jl
+++ b/lib/OptimizationIpopt/src/OptimizationIpopt.jl
@@ -170,6 +170,8 @@ function SciMLBase.has_init(alg::IpoptOptimizer)
     true
 end
 
+SciMLBase.allowscallback(alg::IpoptOptimizer) = true
+
 # Compatibility with OptimizationBase@v3
 function SciMLBase.supports_opt_cache_interface(alg::IpoptOptimizer)
     true
@@ -254,7 +256,8 @@ function __map_optimizer_args(cache,
     )
 
     # Set up progress callback
-    progress_callback = IpoptProgressLogger(progress, callback, prob, cache.n, cache.num_cons, maxiters, cache.iterations)
+    progress_callback = IpoptProgressLogger(
+        progress, callback, prob, cache.n, cache.num_cons, maxiters, cache.iterations)
     intermediate = (args...) -> progress_callback(args...)
     Ipopt.SetIntermediateCallback(prob, intermediate)
diff --git a/lib/OptimizationLBFGSB/src/OptimizationLBFGSB.jl b/lib/OptimizationLBFGSB/src/OptimizationLBFGSB.jl
index 4d8a26771..f342c0345 100644
--- a/lib/OptimizationLBFGSB/src/OptimizationLBFGSB.jl
+++ b/lib/OptimizationLBFGSB/src/OptimizationLBFGSB.jl
@@ -31,6 +31,7 @@ References
     ϵ = 1e-8
 end
 
+SciMLBase.allowscallback(::LBFGSB) = true
 SciMLBase.has_init(::LBFGSB) = true
 SciMLBase.allowsbounds(::LBFGSB) = true
 SciMLBase.requiresgradient(::LBFGSB) = true
diff --git a/lib/OptimizationMOI/src/OptimizationMOI.jl b/lib/OptimizationMOI/src/OptimizationMOI.jl
index 4d3ead8e6..1678424a1 100644
--- a/lib/OptimizationMOI/src/OptimizationMOI.jl
+++ b/lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -288,6 +288,11 @@ function SciMLBase.has_init(alg::Union{MOI.AbstractOptimizer,
     true
 end
 
+function SciMLBase.allowscallback(alg::Union{MOI.AbstractOptimizer,
+        MOI.OptimizerWithAttributes})
+    true
+end
+
 # Compatibility with OptimizationBase@v3
 function SciMLBase.supports_opt_cache_interface(alg::Union{MOI.AbstractOptimizer,
         MOI.OptimizerWithAttributes})
diff --git a/lib/OptimizationMadNLP/Project.toml b/lib/OptimizationMadNLP/Project.toml
index 22e009448..637320c4d 100644
--- a/lib/OptimizationMadNLP/Project.toml
+++ b/lib/OptimizationMadNLP/Project.toml
@@ -30,6 +30,7 @@ julia = "1.10"
 OptimizationBase = {path = "../OptimizationBase"}
 
 [extras]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
@@ -41,4 +42,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [targets]
-test = ["Aqua", "DifferentiationInterface", "ForwardDiff", "ModelingToolkit", "Random", "ReverseDiff", "Test", "Symbolics", "Zygote"]
+test = ["ADTypes", "Aqua", "DifferentiationInterface", "ForwardDiff", "ModelingToolkit", "Random", "ReverseDiff", "Test", "Symbolics", "Zygote"]
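With `allowscallback` now declared for these solvers, a user callback reaches them through the generic `solve` keyword path, which is why the GCMAES `__init` above no longer needs its own `callback`/`progress` defaults. A hedged usage sketch, assuming the standard Optimization.jl callback signature `(state, loss) -> halt::Bool` and that `AutoForwardDiff` from ADTypes is available:

```julia
using OptimizationBase, OptimizationGCMAES, ADTypes

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0];
    lb = [-1.0, -1.0], ub = [2.0, 2.0])

# Returning `true` halts the run early; here we stop once the loss is tiny.
cb = (state, loss) -> loss < 1e-8
sol = solve(prob, GCMAESOpt(); callback = cb, maxiters = 200)
```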
diff --git a/lib/OptimizationMadNLP/src/OptimizationMadNLP.jl b/lib/OptimizationMadNLP/src/OptimizationMadNLP.jl
index b71c1178b..70a53a94e 100644
--- a/lib/OptimizationMadNLP/src/OptimizationMadNLP.jl
+++ b/lib/OptimizationMadNLP/src/OptimizationMadNLP.jl
@@ -231,6 +231,10 @@ end
     hessian_constant::Bool = false
     hessian_approximation::Type = MadNLP.ExactHessian
 
+    # NLP scaling options
+    nlp_scaling::Bool = true
+    nlp_scaling_max_gradient::Float64 = 100.0
+
     # Linear solver configuration
     linear_solver::Union{Nothing, Type} = nothing  # e.g., MumpsSolver, LapackCPUSolver, UmfpackSolver
@@ -247,6 +251,8 @@ end
 
 SciMLBase.has_init(opt::MadNLPOptimizer) = true
 
+SciMLBase.allowscallback(opt::MadNLPOptimizer) = false
+
 function SciMLBase.requiresgradient(opt::MadNLPOptimizer)
     true
 end
@@ -343,7 +349,7 @@ function __map_optimizer_args(cache,
     nvar = length(cache.u0)
     ncon = !isnothing(cache.lcons) ? length(cache.lcons) : 0
 
-    if callback !== DEFAULT_CALLBACK || progress
+    if !(callback isa OptimizationBase.NullCallback) || progress
         @warn("MadNLP doesn't currently support user defined callbacks.")
     end
     # TODO: add support for user callbacks in MadNLP
@@ -374,7 +380,7 @@ function __map_optimizer_args(cache,
         print_level = verbose
     end
 
-    !isnothing(reltol) && @warn "reltol not supported by MadNLP."
+    !isnothing(reltol) && @warn "reltol not supported by MadNLP, use abstol instead."
     tol = isnothing(abstol) ? 1e-8 : abstol
     max_iter = isnothing(maxiters) ? 3000 : maxiters
     max_wall_time = isnothing(maxtime) ? 1e6 : maxtime
@@ -411,6 +417,8 @@ function __map_optimizer_args(cache,
     options[:jacobian_constant] = opt.jacobian_constant
     options[:hessian_constant] = opt.hessian_constant
     options[:hessian_approximation] = opt.hessian_approximation
+    options[:nlp_scaling] = opt.nlp_scaling
+    options[:nlp_scaling_max_gradient] = opt.nlp_scaling_max_gradient
     options[:print_level] = print_level
     options[:tol] = tol
     options[:max_iter] = max_iter
@@ -422,6 +430,7 @@ end
 function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: MadNLPOptimizer}
     maxiters = OptimizationBase._check_and_convert_maxiters(cache.solver_args.maxiters)
     maxtime = OptimizationBase._check_and_convert_maxtime(cache.solver_args.maxtime)
+    maxtime = maxtime isa Float32 ? convert(Float64, maxtime) : maxtime
 
     meta, options = __map_optimizer_args(cache,
         cache.opt;
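The two new fields map one-to-one onto MadNLP's `nlp_scaling` and `nlp_scaling_max_gradient` options, as the `__map_optimizer_args` hunk above shows. A minimal sketch with the defaults written out, assuming `MadNLPOptimizer` keeps its keyword constructor:

```julia
using OptimizationMadNLP

opt = MadNLPOptimizer(
    nlp_scaling = true,               # let MadNLP rescale the problem internally
    nlp_scaling_max_gradient = 100.0  # gradient cap used to derive the scaling factors
)
# sol = solve(prob, opt; abstol = 1e-8)  # reltol is ignored by MadNLP; use abstol
```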
diff --git a/lib/OptimizationMadNLP/test/runtests.jl b/lib/OptimizationMadNLP/test/runtests.jl
index da2d21cbf..7be5e153d 100644
--- a/lib/OptimizationMadNLP/test/runtests.jl
+++ b/lib/OptimizationMadNLP/test/runtests.jl
@@ -2,6 +2,7 @@ using OptimizationMadNLP
 using OptimizationBase
 using MadNLP
 using Test
+using ADTypes
 import Zygote, ForwardDiff, ReverseDiff
 using SparseArrays
 using DifferentiationInterface: SecondOrder
@@ -497,7 +498,7 @@ end
         @test min_dist > 0.5  # Electrons should be well-separated
     end
 
-    @testset verbose = true "LBFGS vs Exact Hessian" begin
+    @testset verbose=true "LBFGS vs Exact Hessian" begin
         # Test with moderate size to show LBFGS efficiency
         np = 10  # Gyroelongated square dipyramid configuration
         x0 = init_electrons_on_sphere(np)
@@ -525,11 +526,12 @@ end
             )
             sol = solve(prob, opt; abstol = 1e-6, maxiters = 300, verbose = false)
 
-            push!(results, name => (
-                objective = sol.objective,
-                iterations = sol.stats.iterations,
-                success = SciMLBase.successful_retcode(sol)
-            ))
+            push!(results,
+                name => (
+                    objective = sol.objective,
+                    iterations = sol.stats.iterations,
+                    success = SciMLBase.successful_retcode(sol)
+                ))
         end
 
         # All methods should converge
diff --git a/lib/OptimizationManopt/src/OptimizationManopt.jl b/lib/OptimizationManopt/src/OptimizationManopt.jl
index 8bee9abac..5b97f0035 100644
--- a/lib/OptimizationManopt/src/OptimizationManopt.jl
+++ b/lib/OptimizationManopt/src/OptimizationManopt.jl
@@ -13,6 +13,7 @@ internal state.
 abstract type AbstractManoptOptimizer end
 
 SciMLBase.has_init(opt::AbstractManoptOptimizer) = true
+SciMLBase.allowscallback(opt::AbstractManoptOptimizer) = true
 
 function __map_optimizer_args!(cache::OptimizationBase.OptimizationCache,
         opt::AbstractManoptOptimizer;
diff --git a/lib/OptimizationNLopt/src/OptimizationNLopt.jl b/lib/OptimizationNLopt/src/OptimizationNLopt.jl
index eea9f4791..7dc6f8168 100644
--- a/lib/OptimizationNLopt/src/OptimizationNLopt.jl
+++ b/lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -9,6 +9,7 @@ using OptimizationBase: deduce_retcode
 
 SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 SciMLBase.has_init(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
+SciMLBase.allowscallback(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 
 function SciMLBase.requiresgradient(opt::Union{NLopt.Algorithm, NLopt.Opt})
     # https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16
diff --git a/lib/OptimizationOptimJL/src/OptimizationOptimJL.jl b/lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
index cdd25f430..d94e8353d 100644
--- a/lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
+++ b/lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
@@ -16,6 +16,10 @@ SciMLBase.has_init(opt::Optim.AbstractOptimizer) = true
 SciMLBase.has_init(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
 SciMLBase.has_init(opt::Optim.ConstrainedOptimizer) = true
 
+SciMLBase.allowscallback(opt::Optim.AbstractOptimizer) = true
+SciMLBase.allowscallback(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
+SciMLBase.allowscallback(opt::Optim.ConstrainedOptimizer) = true
+
 function SciMLBase.requiresgradient(opt::Optim.AbstractOptimizer)
     !(opt isa Optim.ZerothOrderOptimizer)
 end
@@ -39,8 +43,9 @@ function __map_optimizer_args(cache::OptimizationBase.OptimizationCache,
         maxtime::Union{Number, Nothing} = nothing,
         abstol::Union{Number, Nothing} = nothing,
         reltol::Union{Number, Nothing} = nothing,
+        verbose = false,
         kwargs...)
-    mapped_args = (; extended_trace = true, kwargs...)
+    mapped_args = (; extended_trace = true, show_trace = verbose, kwargs...)
 
     if !isnothing(abstol)
         mapped_args = (; mapped_args..., f_abstol = abstol)
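The new `verbose` keyword is forwarded into Optim.jl's `show_trace`, so iteration traces can be toggled from the common `solve` interface without constructing `Optim.Options` by hand. A hedged sketch, assuming `AutoForwardDiff` from ADTypes is in scope:

```julia
using OptimizationBase, OptimizationOptimJL, ADTypes

optf = OptimizationFunction((u, p) -> sum(abs2, u), AutoForwardDiff())
prob = OptimizationProblem(optf, ones(2))

# `verbose = true` maps to `show_trace = true` inside __map_optimizer_args.
sol = solve(prob, BFGS(); verbose = true)
```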
diff --git a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
index 2476e5743..990234640 100644
--- a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
+++ b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -7,6 +7,7 @@ using SciMLBase
 SciMLBase.has_init(opt::AbstractRule) = true
 SciMLBase.requiresgradient(opt::AbstractRule) = true
 SciMLBase.allowsfg(opt::AbstractRule) = true
+SciMLBase.allowscallback(opt::AbstractRule) = true
 
 function SciMLBase.__init(
         prob::SciMLBase.OptimizationProblem, opt::AbstractRule;
@@ -67,7 +68,6 @@ function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: AbstractRule
     breakall = false
     progress_id = :OptimizationOptimizersJL
     for epoch in 1:epochs, d in data
-
         if cache.f.fg !== nothing && dataiterate
             x = cache.f.fg(G, θ, d)
             iterations += 1
@@ -107,7 +107,7 @@ function SciMLBase.__solve(cache::OptimizationCache{O}) where {O <: AbstractRule
         if cache.progress
             message = "Loss: $(round(first(first(x)); digits = 3))"
             @logmsg(LogLevel(-1), "Optimization", _id=progress_id,
-                message=message, progress=iterations/maxiters)
+                message=message, progress=iterations / maxiters)
         end
         if cache.solver_args.save_best
             if first(x)[1] < first(min_err)[1]  #found a better solution
diff --git a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
index 7dba5752b..e08860f5f 100644
--- a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
+++ b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -14,6 +14,7 @@ struct COBYLA <: PRIMASolvers end
 export UOBYQA, NEWUOA, BOBYQA, LINCOA, COBYLA
 
 SciMLBase.has_init(::PRIMASolvers) = true
+SciMLBase.allowscallback(::PRIMASolvers) = true
 SciMLBase.allowsconstraints(::Union{LINCOA, COBYLA}) = true
 SciMLBase.allowsbounds(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
 SciMLBase.requiresconstraints(opt::COBYLA) = true
diff --git a/lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl b/lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
index b03cd88b0..092f202fb 100644
--- a/lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
+++ b/lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
@@ -6,6 +6,7 @@ using SciMLBase, OptimizationOptimJL, OptimizationOptimisers
 
 struct PolyOpt end
 
+SciMLBase.allowscallback(::PolyOpt) = SciMLBase.allowscallback(Optimisers.Adam()) && SciMLBase.allowscallback(OptimizationOptimJL.BFGS())
 SciMLBase.requiresgradient(opt::PolyOpt) = true
 
 function SciMLBase.__solve(prob::OptimizationProblem,
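Note that the trait methods dispatch on optimizer instances, so the `PolyOpt` definition queries `Adam()` and `BFGS()` rather than the bare types. Since `PolyOpt` chains Optimisers.Adam and Optim's BFGS, its trait is derived from both stages instead of being hard-coded; with this patch both stages report `true`, so the composite does too. A quick check:

```julia
using SciMLBase, OptimizationPolyalgorithms

SciMLBase.allowscallback(PolyOpt())  # true: both the Adam and BFGS stages accept callbacks
```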
diff --git a/lib/OptimizationSciPy/src/OptimizationSciPy.jl b/lib/OptimizationSciPy/src/OptimizationSciPy.jl
index 0ac3f589b..43e39a799 100644
--- a/lib/OptimizationSciPy/src/OptimizationSciPy.jl
+++ b/lib/OptimizationSciPy/src/OptimizationSciPy.jl
@@ -216,6 +216,7 @@ for opt_type in [:ScipyMinimize, :ScipyDifferentialEvolution, :ScipyBasinhopping
                  :ScipyLinprog, :ScipyMilp]
     @eval begin
         SciMLBase.allowsbounds(::$opt_type) = true
+        SciMLBase.allowscallback(::$opt_type) = true
         SciMLBase.has_init(::$opt_type) = true
     end
 end
@@ -223,10 +224,12 @@ end
 for opt_type in [:ScipyMinimizeScalar, :ScipyRootScalar, :ScipyLeastSquares]
     @eval begin
         SciMLBase.has_init(::$opt_type) = true
+        SciMLBase.allowscallback(::$opt_type) = true
     end
 end
 
 SciMLBase.has_init(::ScipyRoot) = true
+SciMLBase.allowscallback(::ScipyRoot) = true
 
 function SciMLBase.requiresgradient(opt::ScipyMinimize)
     gradient_free = ["Nelder-Mead", "Powell", "COBYLA", "COBYQA"]
diff --git a/lib/OptimizationSophia/src/OptimizationSophia.jl b/lib/OptimizationSophia/src/OptimizationSophia.jl
index e68702af6..93a2e8831 100644
--- a/lib/OptimizationSophia/src/OptimizationSophia.jl
+++ b/lib/OptimizationSophia/src/OptimizationSophia.jl
@@ -62,6 +62,7 @@ struct Sophia
 end
 
 SciMLBase.has_init(opt::Sophia) = true
+SciMLBase.allowscallback(opt::Sophia) = true
 SciMLBase.requiresgradient(opt::Sophia) = true
 SciMLBase.allowsfg(opt::Sophia) = true
 SciMLBase.requireshessian(opt::Sophia) = true
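Taken together with the `solve.jl` guard, a solver that opts out, like MadNLP above, now fails fast at `solve` time instead of warning mid-run. An end-to-end sketch (the objective and AD choice are placeholders):

```julia
using OptimizationBase, OptimizationMadNLP, ADTypes

optf = OptimizationFunction((u, p) -> sum(abs2, u), AutoForwardDiff())
prob = OptimizationProblem(optf, ones(2))

# MadNLP declares `allowscallback = false`, so a real callback is rejected
# by `_check_opt_alg` before the solver ever runs.
try
    solve(prob, MadNLPOptimizer(); callback = (state, loss) -> false)
catch err
    @show typeof(err)  # expected: an IncompatibleOptimizerError
end
```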