diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index e921c47b2..36f262b35 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -29,6 +29,7 @@ jobs:
           - OptimizationMultistartOptimization
           - OptimizationNLopt
           - OptimizationNOMAD
+          - OptimizationODE
           - OptimizationOptimJL
           - OptimizationOptimisers
           - OptimizationPRIMA
diff --git a/lib/OptimizationODE/Project.toml b/lib/OptimizationODE/Project.toml
new file mode 100644
index 000000000..50a303e15
--- /dev/null
+++ b/lib/OptimizationODE/Project.toml
@@ -0,0 +1,24 @@
+name = "OptimizationODE"
+uuid = "dfa73e59-e644-4d8a-bf84-188d7ecb34e4"
+authors = ["Paras Puneet Singh "]
+version = "0.1.0"
+
+[deps]
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
+OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
+Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
+SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
+SteadyStateDiffEq = "9672c7b4-1e72-59bd-8a11-6ac3964bc41f"
+
+[compat]
+Optimization = "4"
+Reexport = "1"
+julia = "1.10"
+
+[extras]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["ADTypes", "Test"]
diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl
new file mode 100644
index 000000000..ffacdc20a
--- /dev/null
+++ b/lib/OptimizationODE/src/OptimizationODE.jl
@@ -0,0 +1,107 @@
+module OptimizationODE
+
+using Reexport
+@reexport using Optimization, SciMLBase
+using OrdinaryDiffEq, SteadyStateDiffEq
+
+export ODEOptimizer, ODEGradientDescent, RKChebyshevDescent, RKAccelerated, HighOrderDescent
+
+# Wraps an ODE solver that integrates the gradient flow du/dt = -∇f(u);
+# `dt` is only required for fixed-step methods such as Euler.
+struct ODEOptimizer{T, T2}
+    solver::T
+    dt::T2
+end
+ODEOptimizer(solver; dt = nothing) = ODEOptimizer(solver, dt)
+
+# Solver constructors (users call these)
+ODEGradientDescent(; dt) = ODEOptimizer(Euler(); dt)  # fixed-step explicit Euler
+RKChebyshevDescent() = ODEOptimizer(ROCK2())          # stabilized Runge-Kutta-Chebyshev
+RKAccelerated() = ODEOptimizer(Tsit5())               # adaptive fifth-order Runge-Kutta
+HighOrderDescent() = ODEOptimizer(Vern7())            # adaptive seventh-order Runge-Kutta
+
+SciMLBase.requiresbounds(::ODEOptimizer) = false
+SciMLBase.allowsbounds(::ODEOptimizer) = false
+SciMLBase.allowscallback(::ODEOptimizer) = true
+SciMLBase.supports_opt_cache_interface(::ODEOptimizer) = true
+SciMLBase.requiresgradient(::ODEOptimizer) = true
+SciMLBase.requireshessian(::ODEOptimizer) = false
+SciMLBase.requiresconsjac(::ODEOptimizer) = false
+SciMLBase.requiresconshess(::ODEOptimizer) = false
+
+function SciMLBase.__init(prob::OptimizationProblem, opt::ODEOptimizer;
+        callback = Optimization.DEFAULT_CALLBACK, progress = false,
+        maxiters = nothing, kwargs...)
+    return OptimizationCache(prob, opt; callback = callback, progress = progress,
+        maxiters = maxiters, kwargs...)
+end
+
+function SciMLBase.__solve(
+        cache::OptimizationCache{F, RC, LB, UB, LC, UC, S, O, D, P, C}
+    ) where {F, RC, LB, UB, LC, UC, S, O <: ODEOptimizer, D, P, C}
+
+    dt = cache.opt.dt
+    # `maxiters` is stored as `nothing` when not supplied, so apply the default here.
+    maxit = get(cache.solver_args, :maxiters, nothing)
+    maxit = maxit === nothing ? 1000 : maxit
+
+    u0 = copy(cache.u0)
+    p = cache.p
+
+    if cache.f.grad === nothing
+        error("ODEOptimizer requires a gradient. Please provide a function with `grad` defined.")
+    end
+
+    # Gradient flow: integrate du/dt = -∇f(u) until a steady state,
+    # i.e. a stationary point of the objective, is reached.
+    function f!(du, u, p, t)
+        cache.f.grad(du, u, p)
+        @. du = -du
+        return nothing
+    end
+
+    ss_prob = SteadyStateProblem(f!, u0, p)
+    algorithm = DynamicSS(cache.opt.solver)
+
+    cb = cache.callback
+    if cb != Optimization.DEFAULT_CALLBACK || get(cache.solver_args, :progress, false)
+        condition = (u, t, integrator) -> true
+        function affect!(integrator)
+            u_now = integrator.u
+            obj = cache.f(u_now, integrator.p)
+            state = Optimization.OptimizationState(u = u_now, objective = obj)
+            # User callbacks return `true` to request early termination.
+            if cb(state, obj) === true
+                terminate!(integrator)
+            end
+            return nothing
+        end
+        callback = DiscreteCallback(condition, affect!)
+    else
+        callback = nothing
+    end
+
+    solve_kwargs = Dict{Symbol, Any}(:callback => callback, :maxiters => maxit)
+    if dt !== nothing
+        solve_kwargs[:dt] = dt
+    end
+
+    sol = solve(ss_prob, algorithm; solve_kwargs...)
+
+    # Integrator statistics are not guaranteed on steady-state solutions,
+    # so fall back to conservative defaults when they are unavailable.
+    has_stats = hasproperty(sol, :stats) && sol.stats !== nothing
+    has_t = hasproperty(sol, :t) && !isempty(sol.t)
+
+    stats = Optimization.OptimizationStats(
+        iterations = has_stats ? sol.stats.naccept : (has_t ? length(sol.t) - 1 : 0),
+        time = has_t ? sol.t[end] : 0.0,
+        fevals = has_stats ? sol.stats.nf : 0,
+        gevals = has_stats ? sol.stats.nf : 0,
+        hevals = 0)
+
+    return SciMLBase.build_solution(cache, cache.opt, sol.u, cache.f(sol.u, p);
+        retcode = sol.retcode, stats = stats)
+end
+
+end
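For reference, a minimal usage sketch of the API added above. This is not part of the diff; the solver constructors and `solve` keywords come from the code and tests in this PR, while the Rosenbrock objective and its parameter values are illustrative:

```julia
using Optimization, OptimizationODE, ADTypes

# Rosenbrock function; minimum at x = [p[1], p[1]^2].
rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2

x0 = zeros(2)
p = [1.0, 100.0]

# AD supplies the gradient that the ODE-based optimizers require.
optf = OptimizationFunction(rosenbrock, ADTypes.AutoForwardDiff())
prob = OptimizationProblem(optf, x0, p)

# Any exported constructor works here; ODEGradientDescent additionally
# needs a fixed step size, e.g. ODEGradientDescent(dt = 0.01).
sol = solve(prob, RKAccelerated(); maxiters = 100_000)
```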
diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl
new file mode 100644
index 000000000..f3d8a18c0
--- /dev/null
+++ b/lib/OptimizationODE/test/runtests.jl
@@ -0,0 +1,49 @@
+using Test
+using OptimizationODE, SciMLBase, ADTypes
+
+@testset "OptimizationODE Tests" begin
+
+    # Convex quadratic with its minimum at the origin.
+    function f(x, p)
+        return sum(abs2, x)
+    end
+
+    # Analytic gradient for the manual-gradient path below.
+    function g!(g, x, p)
+        @. g = 2 * x
+    end
+
+    x0 = [2.0, -3.0]
+    p = [5.0]
+
+    # Gradient supplied via automatic differentiation.
+    f_autodiff = OptimizationFunction(f, ADTypes.AutoForwardDiff())
+    prob_auto = OptimizationProblem(f_autodiff, x0, p)
+
+    for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
+        sol = solve(prob_auto, opt; maxiters=50_000)
+        @test sol.u ≈ [0.0, 0.0] atol=1e-2
+        @test sol.objective ≈ 0.0 atol=1e-2
+        @test sol.retcode == ReturnCode.Success
+    end
+
+    # Gradient supplied by hand through `grad`.
+    f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad=g!)
+    prob_manual = OptimizationProblem(f_manual, x0)
+
+    for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
+        sol = solve(prob_manual, opt; maxiters=50_000)
+        @test sol.u ≈ [0.0, 0.0] atol=1e-2
+        @test sol.objective ≈ 0.0 atol=1e-2
+        @test sol.retcode == ReturnCode.Success
+    end
+
+    # No gradient available at all: every solver must raise an error.
+    f_fail = OptimizationFunction(f, SciMLBase.NoAD())
+    prob_fail = OptimizationProblem(f_fail, x0)
+
+    for opt in (ODEGradientDescent(dt=0.001), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
+        @test_throws ErrorException solve(prob_fail, opt; maxiters=20_000)
+    end
+
+end
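Since the module advertises `allowscallback` and wires user callbacks into a `DiscreteCallback` around the integrator, here is a sketch of per-iteration monitoring. It assumes the standard Optimization.jl callback convention of `(state, objective) -> halt::Bool`; the printing and the stopping tolerance are illustrative, not part of the diff:

```julia
using Optimization, OptimizationODE, ADTypes

f(x, p) = sum(abs2, x)
optf = OptimizationFunction(f, ADTypes.AutoForwardDiff())
prob = OptimizationProblem(optf, [2.0, -3.0])

# Print progress each step and halt once the objective is small enough;
# returning `true` asks the optimizer to terminate early.
cb = (state, obj) -> begin
    println("u = ", state.u, "  objective = ", obj)
    obj < 1e-8
end

sol = solve(prob, HighOrderDescent(); callback = cb, maxiters = 10_000)
```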