Merged
36 commits
1d322a1
Create OptimizationODE.jl
ParasPuneetSingh May 22, 2025
a81ae86
Create runtests.jl
ParasPuneetSingh May 22, 2025
ab80dcc
Create Project.toml
ParasPuneetSingh May 22, 2025
7558b76
Update CI.yml
ChrisRackauckas May 24, 2025
967ce77
Update OptimizationODE.jl
ParasPuneetSingh May 26, 2025
38733ac
Update runtests.jl
ParasPuneetSingh May 26, 2025
e2b310c
Update OptimizationODE.jl
ParasPuneetSingh May 27, 2025
6b79113
Update runtests.jl
ParasPuneetSingh May 27, 2025
1c7a004
Update Project.toml
ParasPuneetSingh May 27, 2025
792d6cf
Update OptimizationODE.jl
ParasPuneetSingh May 28, 2025
dffe5f5
Update runtests.jl
ParasPuneetSingh May 28, 2025
a2d406e
Update OptimizationODE.jl
ParasPuneetSingh May 30, 2025
cb0668a
Update runtests.jl
ParasPuneetSingh May 30, 2025
c7a06c4
Merge branch 'SciML:master' into master
ParasPuneetSingh May 30, 2025
8e451e1
Update OptimizationODE.jl
ParasPuneetSingh May 30, 2025
9e03221
Update runtests.jl
ParasPuneetSingh May 30, 2025
f9e6a78
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
e53a9e9
Update lib/OptimizationODE/Project.toml
ChrisRackauckas May 30, 2025
18b0614
Update lib/OptimizationODE/Project.toml
ChrisRackauckas May 30, 2025
df75819
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
6aa89af
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
5c20429
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
962832b
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
b437cd8
Update lib/OptimizationODE/test/runtests.jl
ChrisRackauckas May 30, 2025
bcd6f75
Update lib/OptimizationODE/test/runtests.jl
ChrisRackauckas May 30, 2025
a10cb04
Update lib/OptimizationODE/test/runtests.jl
ChrisRackauckas May 30, 2025
6164774
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
b8a7fe4
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 30, 2025
8c2a16e
Update lib/OptimizationODE/test/runtests.jl
ChrisRackauckas May 30, 2025
72a4b62
Update lib/OptimizationODE/Project.toml
ChrisRackauckas May 30, 2025
f977f0d
Update Project.toml
ChrisRackauckas May 30, 2025
2267a46
Update lib/OptimizationODE/test/runtests.jl
ChrisRackauckas May 31, 2025
45d09aa
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 31, 2025
238ba5d
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 31, 2025
43c7ea7
Update Project.toml
ChrisRackauckas May 31, 2025
e68e640
Update lib/OptimizationODE/src/OptimizationODE.jl
ChrisRackauckas May 31, 2025
16 changes: 16 additions & 0 deletions lib/OptimizationODE/Project.toml
@@ -0,0 +1,16 @@
name = "OptimizationODE"
uuid = "dfa73e59-e644-4d8a-bf84-188d7ecb34e4"
authors = ["Paras Puneet Singh <[email protected]>"]
version = "0.1.0"

[deps]
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
Optimization = "3"
Reexport = "1"
julia = "1.9"
129 changes: 129 additions & 0 deletions lib/OptimizationODE/src/OptimizationODE.jl
@@ -0,0 +1,129 @@
module OptimizationODE

using Reexport
@reexport using Optimization
using Optimization.SciMLBase

export ODEGradientDescent

# 1) The optimizer type

struct ODEGradientDescent end

# capability flags
SciMLBase.requiresbounds(::ODEGradientDescent) = false
SciMLBase.allowsbounds(::ODEGradientDescent) = false
SciMLBase.allowscallback(::ODEGradientDescent) = false
SciMLBase.supports_opt_cache_interface(::ODEGradientDescent) = true
SciMLBase.requiresgradient(::ODEGradientDescent) = true
SciMLBase.requireshessian(::ODEGradientDescent) = false
SciMLBase.requiresconsjac(::ODEGradientDescent) = false
SciMLBase.requiresconshess(::ODEGradientDescent) = false
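# (These traits advertise the solver's requirements to Optimization.jl: e.g.
# requiresgradient = true means cache.f.grad must be available, and
# supports_opt_cache_interface = true opts into the init/solve cache workflow.)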

# 2) Map standard solve kwargs to this solver's arguments

function __map_optimizer_args!(
        cache::OptimizationCache, opt::ODEGradientDescent;
        callback = nothing,
        maxiters::Union{Number, Nothing} = nothing,
        maxtime::Union{Number, Nothing} = nothing,
        abstol::Union{Number, Nothing} = nothing,
        reltol::Union{Number, Nothing} = nothing,
        η::Float64 = 0.1,
        tmax::Float64 = 1.0,
        dt::Float64 = 0.01,
        kwargs...)
    # override our defaults
    cache.solver_args = merge(cache.solver_args, (η = η, tmax = tmax, dt = dt))
    # apply the common options; solver_args is a NamedTuple, so merge in new
    # values rather than mutating its fields in place
    if !isnothing(maxiters)
        cache.solver_args = merge(cache.solver_args, (maxiters = maxiters,))
    end
    if !isnothing(maxtime)
        cache.solver_args = merge(cache.solver_args, (maxtime = maxtime,))
    end
    return nothing
end

# 3) Initialize the cache (captures f, u0, bounds, and solver_args)

function SciMLBase.__init(
        prob::SciMLBase.OptimizationProblem,
        opt::ODEGradientDescent,
        data = Optimization.DEFAULT_DATA;
        η::Float64 = 0.1,
        tmax::Float64 = 1.0,
        dt::Float64 = 0.01,
        callback = (args...) -> false,
        progress = false,
        kwargs...)
    return OptimizationCache(
        prob, opt, data;
        η = η,
        tmax = tmax,
        dt = dt,
        callback = callback,
        progress = progress,
        maxiters = nothing,
        maxtime = nothing,
        kwargs...
    )
end

# 4) The actual solve loop: Euler integration of gradient descent
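# Conceptually this integrates the gradient flow ODE du/dt = -∇f(u) with an
# explicit Euler update u .-= η .* G, where η is the step size; t advances by
# dt purely as a pseudo-time budget checked against tmax.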

function SciMLBase.__solve(
        cache::OptimizationCache{F, RC, LB, UB, LC, UC, S, O, D, P, C}
) where {F, RC, LB, UB, LC, UC, S, O <: ODEGradientDescent, D, P, C}

    # unpack initial state & parameters
    u0 = cache.u0
    η = get(cache.solver_args, :η, 0.1)
    tmax = get(cache.solver_args, :tmax, 1.0)
    dt = get(cache.solver_args, :dt, 0.01)
    maxiter = get(cache.solver_args, :maxiters, nothing)

    # prepare working storage
    u = copy(u0)
    G = similar(u)

    t = 0.0
    iter = 0
    t0 = time()
    # Euler loop
    while (isnothing(maxiter) || iter < maxiter) && t <= tmax
        # compute gradient in-place
        cache.f.grad(G, u, cache.p)
        # Euler step
        u .-= η .* G
        t += dt
        iter += 1
    end

    # final objective
    fval = cache.f(u, cache.p)

    # record stats: one final f-eval, iter gradient-evals
    stats = Optimization.OptimizationStats(
        iterations = iter,
        time = time() - t0,
        fevals = 1,
        gevals = iter,
        hevals = 0
    )

    return SciMLBase.build_solution(
        cache, cache.opt,
        u,
        fval,
        retcode = ReturnCode.Success,
        stats = stats
    )
end

end # module
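
Not part of the diff: a usage sketch through the standard Optimization.jl interface; the quadratic objective and its hand-written gradient below are illustrative assumptions, not from the PR.

using Optimization, OptimizationODE

quad(u, p) = sum(abs2, u .- p)                        # minimum at u == p
quad_grad!(g, u, p, data) = (g .= 2 .* (u .- p); g)   # same signature as the test's gradient

f = OptimizationFunction(quad, Optimization.SciMLBase.NoAD(); grad = quad_grad!)
prob = OptimizationProblem(f, zeros(2), [1.0, -2.0])

# η is the Euler step size; tmax and dt together bound the iteration count.
sol = solve(prob, ODEGradientDescent(); η = 0.1, tmax = 100.0, dt = 0.01)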
45 changes: 45 additions & 0 deletions lib/OptimizationODE/test/runtests.jl
@@ -0,0 +1,45 @@
using Test
using Optimization
using Optimization.SciMLBase
using OptimizationODE

@testset "ODEGradientDescent Tests" begin
    # Define the Rosenbrock objective and its gradient
    function rosen(u, p)
        return (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
    end

    # the trailing `data` argument is the extra positional argument that
    # Optimization.jl forwards when wrapping a NoAD gradient
    function rosen_grad!(g, u, p, data)
        g[1] = -2 * (p[1] - u[1]) - 4 * p[2] * u[1] * (u[2] - u[1]^2)
        g[2] = 2 * p[2] * (u[2] - u[1]^2)
        return g
    end

    # Set up the problem
    u0 = [0.0, 0.0]
    p = [1.0, 100.0]

    # Wrap into an OptimizationFunction without AD, providing our gradient
    f = OptimizationFunction(rosen, Optimization.SciMLBase.NoAD(); grad = rosen_grad!)

    prob = OptimizationProblem(f, u0, p)

    # Solve with ODEGradientDescent
    sol = solve(prob, ODEGradientDescent(); η = 0.001, tmax = 1_000.0, dt = 0.01)

    # Assertions
    @test isapprox(sol.u[1], 1.0; atol = 1e-2)
    @test isapprox(sol.u[2], 1.0; atol = 1e-2)
end
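
Not part of the diff: since ForwardDiff is already a dependency in Project.toml, a hypothetical extra test could validate the hand-written gradient against AD.

using Test, ForwardDiff

rosen(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
p = [1.0, 100.0]
u = [0.3, 0.7]
g = [-2 * (p[1] - u[1]) - 4 * p[2] * u[1] * (u[2] - u[1]^2),
     2 * p[2] * (u[2] - u[1]^2)]
@test g ≈ ForwardDiff.gradient(x -> rosen(x, p), u)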