diff --git a/Project.toml b/Project.toml
index 7fd7cf89e..27b780d95 100644
--- a/Project.toml
+++ b/Project.toml
@@ -45,7 +45,7 @@ Logging = "1.10"
 LoggingExtras = "0.4, 1"
 Lux = "1.12.4"
 MLUtils = "0.4"
-ModelingToolkit = "10.23"
+ModelingToolkit = "11"
 Mooncake = "0.4.138"
 Optim = ">= 1.4.1"
 Optimisers = ">= 0.2.5"
@@ -64,7 +64,7 @@ SafeTestsets = "0.1"
 SciMLBase = "2.122.1"
 SciMLSensitivity = "7"
 SparseArrays = "1.10"
-Symbolics = "6"
+Symbolics = "6, 7"
 TerminalLoggers = "0.1"
 Test = "1.10"
 Tracker = "0.2"
diff --git a/lib/OptimizationBase/Project.toml b/lib/OptimizationBase/Project.toml
index a90d299ea..67e97a6d4 100644
--- a/lib/OptimizationBase/Project.toml
+++ b/lib/OptimizationBase/Project.toml
@@ -23,7 +23,6 @@ FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
-ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
 SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
@@ -34,7 +33,6 @@ OptimizationFiniteDiffExt = "FiniteDiff"
 OptimizationForwardDiffExt = "ForwardDiff"
 OptimizationMLDataDevicesExt = "MLDataDevices"
 OptimizationMLUtilsExt = "MLUtils"
-OptimizationMTKExt = "ModelingToolkit"
 OptimizationReverseDiffExt = "ReverseDiff"
 OptimizationSymbolicAnalysisExt = "SymbolicAnalysis"
 OptimizationZygoteExt = "Zygote"
@@ -42,7 +40,7 @@ OptimizationZygoteExt = "Zygote"
 [compat]
 ADTypes = "1.14"
 ArrayInterface = "7.6"
-DifferentiationInterface = "0.7"
+DifferentiationInterface = "0.7.13"
 DocStringExtensions = "0.9"
 Enzyme = "0.13.2"
 FastClosures = "0.3"
@@ -51,7 +49,6 @@ ForwardDiff = "0.10.26, 1"
 LinearAlgebra = "1.9, 1.10"
 MLDataDevices = "1"
 MLUtils = "0.4"
-ModelingToolkit = "10.23"
 PDMats = "0.11"
 Reexport = "1.2"
 ReverseDiff = "1.14"
diff --git a/lib/OptimizationBase/ext/OptimizationMTKBaseExt.jl b/lib/OptimizationBase/ext/OptimizationMTKBaseExt.jl
new file mode 100644
index 000000000..ee77ab014
--- /dev/null
+++ b/lib/OptimizationBase/ext/OptimizationMTKBaseExt.jl
@@ -0,0 +1,213 @@
+module OptimizationMTKBaseExt
+
+import OptimizationBase, OptimizationBase.ArrayInterface
+import SciMLBase
+import SciMLBase: OptimizationFunction
+import OptimizationBase.ADTypes: AutoSymbolics, AutoSparse
+using ModelingToolkitBase
+
+function OptimizationBase.instantiate_function(
+        f::OptimizationFunction{true}, x, adtype::AutoSparse{<:AutoSymbolics}, p,
+        num_cons = 0;
+        g = false, h = false, hv = false, fg = false, fgh = false,
+        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+        lag_h = false)
+    p = isnothing(p) ? SciMLBase.NullParameters() : p
+
+    sys = complete(ModelingToolkitBase.modelingtoolkitize(OptimizationProblem(f, x, p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    # don't need to pass `x` or `p` since they're defaults now
+    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+        sparse = true, cons_j = cons_j, cons_h = cons_h,
+        cons_sparse = true)
+    f = mtkprob.f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = similar(f.hess_prototype, eltype(θ))
+        hess(res, θ, args...)
+        H .= res * v
+    end
+
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+function OptimizationBase.instantiate_function(
+        f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
+        adtype::AutoSparse{<:AutoSymbolics}, num_cons = 0;
+        g = false, h = false, hv = false, fg = false, fgh = false,
+        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+        lag_h = false)
+    p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
+
+    sys = complete(ModelingToolkitBase.modelingtoolkitize(OptimizationProblem(f, cache.u0,
+        cache.p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    # don't need to pass `x` or `p` since they're defaults now
+    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+        sparse = true, cons_j = cons_j, cons_h = cons_h,
+        cons_sparse = true)
+    f = mtkprob.f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = similar(f.hess_prototype, eltype(θ))
+        hess(res, θ, args...)
+        H .= res * v
+    end
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+function OptimizationBase.instantiate_function(
+        f::OptimizationFunction{true}, x, adtype::AutoSymbolics, p,
+        num_cons = 0; g = false, h = false, hv = false, fg = false, fgh = false,
+        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+        lag_h = false)
+    p = isnothing(p) ? SciMLBase.NullParameters() : p
+
+    sys = complete(ModelingToolkitBase.modelingtoolkitize(OptimizationProblem(f, x, p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    # don't need to pass `x` or `p` since they're defaults now
+    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+        sparse = false, cons_j = cons_j, cons_h = cons_h,
+        cons_sparse = false)
+    f = mtkprob.f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = ArrayInterface.zeromatrix(θ)
+        hess(res, θ, args...)
+        H .= res * v
+    end
+
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+function OptimizationBase.instantiate_function(
+        f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
+        adtype::AutoSymbolics, num_cons = 0;
+        g = false, h = false, hv = false, fg = false, fgh = false,
+        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+        lag_h = false)
+    p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
+
+    sys = complete(ModelingToolkitBase.modelingtoolkitize(OptimizationProblem(f, cache.u0,
+        cache.p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    # don't need to pass `x` or `p` since they're defaults now
+    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+        sparse = false, cons_j = cons_j, cons_h = cons_h,
+        cons_sparse = false)
+    f = mtkprob.f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = ArrayInterface.zeromatrix(θ)
+        hess(res, θ, args...)
+        H .= res * v
+    end
+
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+end
diff --git a/lib/OptimizationBase/ext/OptimizationMTKExt.jl b/lib/OptimizationBase/ext/OptimizationMTKExt.jl
index 7538d8c3c..383a674cb 100644
--- a/lib/OptimizationBase/ext/OptimizationMTKExt.jl
+++ b/lib/OptimizationBase/ext/OptimizationMTKExt.jl
@@ -6,208 +6,210 @@ import SciMLBase: OptimizationFunction
 import OptimizationBase.ADTypes: AutoSymbolics, AutoSparse
 using ModelingToolkit
 
-function OptimizationBase.instantiate_function(
-        f::OptimizationFunction{true}, x, adtype::AutoSparse{<:AutoSymbolics}, p,
-        num_cons = 0;
-        g = false, h = false, hv = false, fg = false, fgh = false,
-        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
-        lag_h = false)
-    p = isnothing(p) ? SciMLBase.NullParameters() : p
-
-    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
-        lcons = fill(0.0,
-            num_cons),
-        ucons = fill(0.0,
-            num_cons))))
-    #sys = ModelingToolkit.structural_simplify(sys)
-    # don't need to pass `x` or `p` since they're defaults now
-    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
-        sparse = true, cons_j = cons_j, cons_h = cons_h,
-        cons_sparse = true)
-    f = mtkprob.f
-
-    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
-
-    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
-
-    hv = function (H, θ, v, args...)
-        res = similar(f.hess_prototype, eltype(θ))
-        hess(res, θ, args...)
-        H .= res * v
+@static if pkgversion(ModelingToolkit) < v"11.0"
+    function OptimizationBase.instantiate_function(
+            f::OptimizationFunction{true}, x, adtype::AutoSparse{<:AutoSymbolics}, p,
+            num_cons = 0;
+            g = false, h = false, hv = false, fg = false, fgh = false,
+            cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+            lag_h = false)
+        p = isnothing(p) ? SciMLBase.NullParameters() : p
+
+        sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
+            lcons = fill(0.0,
+                num_cons),
+            ucons = fill(0.0,
+                num_cons))))
+        #sys = ModelingToolkit.structural_simplify(sys)
+        # don't need to pass `x` or `p` since they're defaults now
+        mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+            sparse = true, cons_j = cons_j, cons_h = cons_h,
+            cons_sparse = true)
+        f = mtkprob.f
+
+        grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+        hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+        hv = function (H, θ, v, args...)
+            res = similar(f.hess_prototype, eltype(θ))
+            hess(res, θ, args...)
+            H .= res * v
+        end
+
+        if !isnothing(f.cons)
+            cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+            cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+            cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+        else
+            cons = nothing
+            cons_j = nothing
+            cons_h = nothing
+        end
+
+        return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+            cons = cons, cons_j = cons_j, cons_h = cons_h,
+            hess_prototype = f.hess_prototype,
+            cons_jac_prototype = f.cons_jac_prototype,
+            cons_hess_prototype = f.cons_hess_prototype,
+            expr = OptimizationBase.symbolify(f.expr),
+            cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+            sys = sys,
+            observed = f.observed)
     end
 
-    if !isnothing(f.cons)
-        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
-        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
-        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
-    else
-        cons = nothing
-        cons_j = nothing
-        cons_h = nothing
+    function OptimizationBase.instantiate_function(
+            f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
+            adtype::AutoSparse{<:AutoSymbolics}, num_cons = 0;
+            g = false, h = false, hv = false, fg = false, fgh = false,
+            cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+            lag_h = false)
+        p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
+
+        sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
+            cache.p;
+            lcons = fill(0.0,
+                num_cons),
+            ucons = fill(0.0,
+                num_cons))))
+        #sys = ModelingToolkit.structural_simplify(sys)
+        # don't need to pass `x` or `p` since they're defaults now
+        mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+            sparse = true, cons_j = cons_j, cons_h = cons_h,
+            cons_sparse = true)
+        f = mtkprob.f
+
+        grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+        hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+        hv = function (H, θ, v, args...)
+            res = similar(f.hess_prototype, eltype(θ))
+            hess(res, θ, args...)
+            H .= res * v
+        end
+        if !isnothing(f.cons)
+            cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+            cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+            cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+        else
+            cons = nothing
+            cons_j = nothing
+            cons_h = nothing
+        end
+
+        return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+            cons = cons, cons_j = cons_j, cons_h = cons_h,
+            hess_prototype = f.hess_prototype,
+            cons_jac_prototype = f.cons_jac_prototype,
+            cons_hess_prototype = f.cons_hess_prototype,
+            expr = OptimizationBase.symbolify(f.expr),
+            cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+            sys = sys,
+            observed = f.observed)
     end
 
-    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
-        cons = cons, cons_j = cons_j, cons_h = cons_h,
-        hess_prototype = f.hess_prototype,
-        cons_jac_prototype = f.cons_jac_prototype,
-        cons_hess_prototype = f.cons_hess_prototype,
-        expr = OptimizationBase.symbolify(f.expr),
-        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
-        sys = sys,
-        observed = f.observed)
-end
-
-function OptimizationBase.instantiate_function(
-        f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
-        adtype::AutoSparse{<:AutoSymbolics}, num_cons = 0;
-        g = false, h = false, hv = false, fg = false, fgh = false,
-        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
-        lag_h = false)
-    p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
-
-    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
-        cache.p;
-        lcons = fill(0.0,
-            num_cons),
-        ucons = fill(0.0,
-            num_cons))))
-    #sys = ModelingToolkit.structural_simplify(sys)
-    # don't need to pass `x` or `p` since they're defaults now
-    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
-        sparse = true, cons_j = cons_j, cons_h = cons_h,
-        cons_sparse = true)
-    f = mtkprob.f
-
-    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
-
-    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
-
-    hv = function (H, θ, v, args...)
-        res = similar(f.hess_prototype, eltype(θ))
-        hess(res, θ, args...)
-        H .= res * v
-    end
-    if !isnothing(f.cons)
-        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
-        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
-        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
-    else
-        cons = nothing
-        cons_j = nothing
-        cons_h = nothing
-    end
-
-    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
-        cons = cons, cons_j = cons_j, cons_h = cons_h,
-        hess_prototype = f.hess_prototype,
-        cons_jac_prototype = f.cons_jac_prototype,
-        cons_hess_prototype = f.cons_hess_prototype,
-        expr = OptimizationBase.symbolify(f.expr),
-        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
-        sys = sys,
-        observed = f.observed)
-end
-
-function OptimizationBase.instantiate_function(
-        f::OptimizationFunction{true}, x, adtype::AutoSymbolics, p,
-        num_cons = 0; g = false, h = false, hv = false, fg = false, fgh = false,
-        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
-        lag_h = false)
-    p = isnothing(p) ? SciMLBase.NullParameters() : p
-
-    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
-        lcons = fill(0.0,
-            num_cons),
-        ucons = fill(0.0,
-            num_cons))))
-    #sys = ModelingToolkit.structural_simplify(sys)
-    # don't need to pass `x` or `p` since they're defaults now
-    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
-        sparse = false, cons_j = cons_j, cons_h = cons_h,
-        cons_sparse = false)
-    f = mtkprob.f
-
-    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
-
-    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
-
-    hv = function (H, θ, v, args...)
-        res = ArrayInterface.zeromatrix(θ)
-        hess(res, θ, args...)
-        H .= res * v
+    function OptimizationBase.instantiate_function(
+            f::OptimizationFunction{true}, x, adtype::AutoSymbolics, p,
+            num_cons = 0; g = false, h = false, hv = false, fg = false, fgh = false,
+            cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+            lag_h = false)
+        p = isnothing(p) ? SciMLBase.NullParameters() : p
+
+        sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
+            lcons = fill(0.0,
+                num_cons),
+            ucons = fill(0.0,
+                num_cons))))
+        #sys = ModelingToolkit.structural_simplify(sys)
+        # don't need to pass `x` or `p` since they're defaults now
+        mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+            sparse = false, cons_j = cons_j, cons_h = cons_h,
+            cons_sparse = false)
+        f = mtkprob.f
+
+        grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+        hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+        hv = function (H, θ, v, args...)
+            res = ArrayInterface.zeromatrix(θ)
+            hess(res, θ, args...)
+            H .= res * v
+        end
+
+        if !isnothing(f.cons)
+            cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+            cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+            cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+        else
+            cons = nothing
+            cons_j = nothing
+            cons_h = nothing
+        end
+
+        return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+            cons = cons, cons_j = cons_j, cons_h = cons_h,
+            hess_prototype = f.hess_prototype,
+            cons_jac_prototype = f.cons_jac_prototype,
+            cons_hess_prototype = f.cons_hess_prototype,
+            expr = OptimizationBase.symbolify(f.expr),
+            cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+            sys = sys,
+            observed = f.observed)
     end
 
-    if !isnothing(f.cons)
-        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
-        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
-        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
-    else
-        cons = nothing
-        cons_j = nothing
-        cons_h = nothing
+    function OptimizationBase.instantiate_function(
+            f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
+            adtype::AutoSymbolics, num_cons = 0;
+            g = false, h = false, hv = false, fg = false, fgh = false,
+            cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
+            lag_h = false)
+        p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
+
+        sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
+            cache.p;
+            lcons = fill(0.0,
+                num_cons),
+            ucons = fill(0.0,
+                num_cons))))
+        #sys = ModelingToolkit.structural_simplify(sys)
+        # don't need to pass `x` or `p` since they're defaults now
+        mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
+            sparse = false, cons_j = cons_j, cons_h = cons_h,
+            cons_sparse = false)
+        f = mtkprob.f
+
+        grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
+
+        hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
+
+        hv = function (H, θ, v, args...)
+            res = ArrayInterface.zeromatrix(θ)
+            hess(res, θ, args...)
+            H .= res * v
+        end
+
+        if !isnothing(f.cons)
+            cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
+            cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
+            cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
+        else
+            cons = nothing
+            cons_j = nothing
+            cons_h = nothing
+        end
+
+        return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+            cons = cons, cons_j = cons_j, cons_h = cons_h,
+            hess_prototype = f.hess_prototype,
+            cons_jac_prototype = f.cons_jac_prototype,
+            cons_hess_prototype = f.cons_hess_prototype,
+            expr = OptimizationBase.symbolify(f.expr),
+            cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+            sys = sys,
+            observed = f.observed)
     end
-
-    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
-        cons = cons, cons_j = cons_j, cons_h = cons_h,
-        hess_prototype = f.hess_prototype,
-        cons_jac_prototype = f.cons_jac_prototype,
-        cons_hess_prototype = f.cons_hess_prototype,
-        expr = OptimizationBase.symbolify(f.expr),
-        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
-        sys = sys,
-        observed = f.observed)
-end
-
-function OptimizationBase.instantiate_function(
-        f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
-        adtype::AutoSymbolics, num_cons = 0;
-        g = false, h = false, hv = false, fg = false, fgh = false,
-        cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false,
-        lag_h = false)
-    p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
-
-    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
-        cache.p;
-        lcons = fill(0.0,
-            num_cons),
-        ucons = fill(0.0,
-            num_cons))))
-    #sys = ModelingToolkit.structural_simplify(sys)
-    # don't need to pass `x` or `p` since they're defaults now
-    mtkprob = OptimizationProblem(sys, nothing; grad = g, hess = h,
-        sparse = false, cons_j = cons_j, cons_h = cons_h,
-        cons_sparse = false)
-    f = mtkprob.f
-
-    grad = (G, θ, args...) -> f.grad(G, θ, mtkprob.p, args...)
-
-    hess = (H, θ, args...) -> f.hess(H, θ, mtkprob.p, args...)
-
-    hv = function (H, θ, v, args...)
-        res = ArrayInterface.zeromatrix(θ)
-        hess(res, θ, args...)
-        H .= res * v
-    end
-
-    if !isnothing(f.cons)
-        cons = (res, θ) -> f.cons(res, θ, mtkprob.p)
-        cons_j = (J, θ) -> f.cons_j(J, θ, mtkprob.p)
-        cons_h = (res, θ) -> f.cons_h(res, θ, mtkprob.p)
-    else
-        cons = nothing
-        cons_j = nothing
-        cons_h = nothing
-    end
-
-    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
-        cons = cons, cons_j = cons_j, cons_h = cons_h,
-        hess_prototype = f.hess_prototype,
-        cons_jac_prototype = f.cons_jac_prototype,
-        cons_hess_prototype = f.cons_hess_prototype,
-        expr = OptimizationBase.symbolify(f.expr),
-        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
-        sys = sys,
-        observed = f.observed)
-end
 end
diff --git a/lib/OptimizationBase/src/OptimizationDIExt.jl b/lib/OptimizationBase/src/OptimizationDIExt.jl
index 9d3ec006a..431df45f1 100644
--- a/lib/OptimizationBase/src/OptimizationDIExt.jl
+++ b/lib/OptimizationBase/src/OptimizationDIExt.jl
@@ -14,6 +14,19 @@ import DifferentiationInterface: prepare_gradient, prepare_hessian, prepare_hvp,
                                  hvp, jacobian, Constant
 using ADTypes, SciMLBase
 
+function instantiate_function(
+        f::OptimizationFunction{true}, x, ::ADTypes.AutoSparse{<:ADTypes.AutoSymbolics},
+        args...; kwargs...)
+    instantiate_function(f, x, ADTypes.AutoSymbolics(), args...; kwargs...)
+end
+function instantiate_function(
+        f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
+        ::ADTypes.AutoSparse{<:ADTypes.AutoSymbolics}, args...; kwargs...)
+    x = cache.u0
+    p = cache.p
+
+    return instantiate_function(f, x, ADTypes.AutoSymbolics(), p, args...; kwargs...)
+end
 function instantiate_function(
         f::OptimizationFunction{true}, x, adtype::ADTypes.AbstractADType,
         p = SciMLBase.NullParameters(), num_cons = 0;
@@ -180,8 +193,16 @@ function instantiate_function(
     # Prepare constraint Hessian preparations if needed by lag_h or cons_h
     if f.cons !== nothing && f.cons_h === nothing && (cons_h == true || lag_h == true)
-        prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i))
-                          for i in 1:num_cons]
+        # This is necessary because DI will create a symbolic index for `Constant(i)`
+        # to trace into the function, since it assumes `Constant` can change between
+        # DI calls.
+        if adtype isa ADTypes.AutoSymbolics
+            prep_cons_hess = [prepare_hessian(Base.Fix2(cons_oop, i), soadtype, x)
+                              for i in 1:num_cons]
+        else
+            prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i))
+                              for i in 1:num_cons]
+        end
     else
         prep_cons_hess = nothing
     end
@@ -190,9 +211,17 @@ function instantiate_function(
     if f.cons !== nothing && f.cons_h === nothing && prep_cons_hess !== nothing
         # Standard cons_h! that returns array of matrices
         if cons_h == true
-            cons_h! = function (H, θ)
-                for i in 1:num_cons
-                    hessian!(cons_oop, H[i], prep_cons_hess[i], soadtype, θ, Constant(i))
+            if adtype isa ADTypes.AutoSymbolics
+                cons_h! = function (H, θ)
+                    for i in 1:num_cons
+                        hessian!(Base.Fix2(cons_oop, i), H[i], prep_cons_hess[i], soadtype, θ)
+                    end
+                end
+            else
+                cons_h! = function (H, θ)
+                    for i in 1:num_cons
+                        hessian!(cons_oop, H[i], prep_cons_hess[i], soadtype, θ, Constant(i))
+                    end
                 end
+            end
         else
diff --git a/lib/OptimizationBase/src/adtypes.jl b/lib/OptimizationBase/src/adtypes.jl
index 071528dc4..9f69fd93b 100644
--- a/lib/OptimizationBase/src/adtypes.jl
+++ b/lib/OptimizationBase/src/adtypes.jl
@@ -88,44 +88,6 @@ Hessian is not defined via ForwardDiff.
 """
 AutoForwardDiff
 
-"""
-    AutoModelingToolkit <: AbstractADType
-
-An AbstractADType choice for use in OptimizationFunction for automatically
-generating the unspecified derivative functions. Usage:
-
-```julia
-OptimizationFunction(f, AutoModelingToolkit(); kwargs...)
-```
-
-This uses the [ModelingToolkit.jl](https://github.com/SciML/ModelingToolkit.jl)
-package's `modelingtookitize` functionality to generate the derivatives and other fields of an `OptimizationFunction`.
-This backend creates the symbolic expressions for the objective and its derivatives as well as
-the constraints and their derivatives. Through `structural_simplify`, it enforces simplifications
-that can reduce the number of operations needed to compute the derivatives of the constraints. This automatically
-generates the expression graphs that some solver interfaces through OptimizationMOI like
-[AmplNLWriter.jl](https://github.com/jump-dev/AmplNLWriter.jl) require.
-
-  - Compatible with GPUs
-  - Compatible with Hessian-based optimization
-  - Compatible with Hv-based optimization
-  - Compatible with constraints
-
-Note that only the unspecified derivative functions are defined. For example,
-if a `hess` function is supplied to the `OptimizationFunction`, then the
-Hessian is not generated via ModelingToolkit.
-
-## Constructor
-
-```julia
-AutoModelingToolkit(false, false)
-```
-
-  - `obj_sparse`: to indicate whether the objective hessian is sparse.
-  - `cons_sparse`: to indicate whether the constraints' jacobian and hessian are sparse.
-
-"""
-AutoModelingToolkit
 
 """
     AutoReverseDiff <: AbstractADType
@@ -220,7 +182,9 @@ Hessian is not defined via Zygote.
 AutoZygote
 
 function generate_adtype(adtype)
-    if !(adtype isa SciMLBase.NoAD || adtype isa DifferentiationInterface.SecondOrder ||
+    if adtype isa AutoSymbolics || adtype isa AutoSparse{<:AutoSymbolics}
+        soadtype = adtype
+    elseif !(adtype isa SciMLBase.NoAD || adtype isa DifferentiationInterface.SecondOrder ||
              adtype isa AutoZygote)
         soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
     elseif adtype isa AutoZygote
@@ -271,7 +235,9 @@ function filled_spad(adtype)
 end
 
 function generate_sparse_adtype(adtype)
-    if !(adtype.dense_ad isa DifferentiationInterface.SecondOrder)
+    if adtype isa AutoSparse{<:AutoSymbolics}
+        soadtype = adtype
+    elseif !(adtype.dense_ad isa DifferentiationInterface.SecondOrder)
         adtype = filled_spad(adtype)
         soadtype = spadtype_to_spsoadtype(adtype)
     else
diff --git a/lib/OptimizationIpopt/Project.toml b/lib/OptimizationIpopt/Project.toml
index 1257890dd..433088762 100644
--- a/lib/OptimizationIpopt/Project.toml
+++ b/lib/OptimizationIpopt/Project.toml
@@ -14,7 +14,7 @@ SymbolicIndexingInterface = "2efcf032-c050-4f8e-a9bb-153293bab1f5"
 [compat]
 Ipopt = "1.10.3"
 LinearAlgebra = "1.10.0"
-ModelingToolkit = "10.23"
+ModelingToolkit = "11"
 OptimizationBase = "3, 4"
 Reexport = "1.2"
 SciMLBase = "2.122.1"
diff --git a/lib/OptimizationMOI/Project.toml b/lib/OptimizationMOI/Project.toml
index a2e326911..6243bba5a 100644
--- a/lib/OptimizationMOI/Project.toml
+++ b/lib/OptimizationMOI/Project.toml
@@ -1,53 +1,60 @@
 name = "OptimizationMOI"
 uuid = "fd9f6733-72f4-499f-8506-86b2bdd0dea1"
-authors = ["Vaibhav Dixit and contributors"]
 version = "0.5.11"
+authors = ["Vaibhav Dixit and contributors"]
+
 [deps]
-OptimizationBase = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"
-Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
-SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
-ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
-SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
+ModelingToolkitBase = "7771a370-6774-4173-bd38-47e70ca0b839"
+OptimizationBase = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"
 Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
+SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
 SciMLStructures = "53ae85a6-f571-4167-b2af-e1d143709226"
+SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 SymbolicIndexingInterface = "2efcf032-c050-4f8e-a9bb-153293bab1f5"
+SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
+Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 
-[extras]
-HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
-AmplNLWriter = "7c4d4715-977e-5154-bfe0-e096adeac482"
-ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
-Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
-Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
-NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
-Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+[weakdeps]
+ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 
 [sources]
 OptimizationBase = {path = "../OptimizationBase"}
 
 [compat]
-HiGHS = "1"
-OptimizationBase = "3.3.1, 4"
-Test = "1.6"
-Symbolics = "6"
 AmplNLWriter = "1"
-LinearAlgebra = "1"
+HiGHS = "1"
+Ipopt = "1.10.4"
 Ipopt_jll = "300.1400"
 Juniper = "0.9"
-Ipopt = "1.10.4"
+LinearAlgebra = "1"
+MathOptInterface = "1.40.2"
+ModelingToolkit = "11"
+ModelingToolkitBase = "1"
 NLopt = "1"
-SciMLBase = "2.122.1"
+OptimizationBase = "3.3.1, 4"
+Reexport = "1.2"
+SciMLBase = "2.130"
+SciMLStructures = "1"
 SparseArrays = "1.6"
-ModelingToolkit = "10.23"
 SymbolicIndexingInterface = "0.3"
-julia = "1.10"
+SymbolicUtils = "4.9.2"
+Symbolics = "6, 7"
+Test = "1.6"
 Zygote = "0.6, 0.7"
-MathOptInterface = "1.40.2"
-Reexport = "1.2"
-SciMLStructures = "1"
+julia = "1.10"
+
+[extras]
+AmplNLWriter = "7c4d4715-977e-5154-bfe0-e096adeac482"
+HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
+Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
+Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
+Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
+NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
+ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [targets]
 test = ["AmplNLWriter", "HiGHS", "Ipopt", "Ipopt_jll", "Juniper", "NLopt", "ReverseDiff", "Test", "Zygote"]
diff --git a/lib/OptimizationMOI/src/OptimizationMOI.jl b/lib/OptimizationMOI/src/OptimizationMOI.jl
index 1678424a1..7f24d708f 100644
--- a/lib/OptimizationMOI/src/OptimizationMOI.jl
+++ b/lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -7,10 +7,11 @@ using SciMLBase
 using SciMLStructures
 using SymbolicIndexingInterface
 using SparseArrays
-import ModelingToolkit: parameters, unknowns, varmap_to_vars, mergedefaults, toexpr
-import ModelingToolkit
-const MTK = ModelingToolkit
+import ModelingToolkitBase: parameters, unknowns, varmap_to_vars, mergedefaults, toexpr
+import ModelingToolkitBase
+const MTK = ModelingToolkitBase
 using Symbolics
+import SymbolicUtils as SU
 using LinearAlgebra
 
 const MOI = MathOptInterface
@@ -217,7 +218,7 @@ function convert_to_expr(eq, expr_map; expand_expr = false)
             Symbolics.expand(eq)
         end
     end
-    expr = ModelingToolkit.toexpr(eq)
+    expr = ModelingToolkitBase.toexpr(eq)
     expr = rep_pars_vals!(expr, expr_map)
     expr = symbolify!(expr)
@@ -225,12 +226,12 @@ end
 
 function get_expr_map(sys)
-    dvs = ModelingToolkit.unknowns(sys)
-    ps = ModelingToolkit.parameters(sys)
+    dvs = ModelingToolkitBase.unknowns(sys)
+    ps = ModelingToolkitBase.parameters(sys)
     return vcat(
-        [ModelingToolkit.toexpr(_s) => Expr(:ref, :x, i)
+        [ModelingToolkitBase.toexpr(_s) => Expr(:ref, :x, i)
          for (i, _s) in enumerate(dvs)],
-        [ModelingToolkit.toexpr(_p) => Expr(:ref, :p, i)
+        [ModelingToolkitBase.toexpr(_p) => Expr(:ref, :p, i)
          for (i, _p) in enumerate(ps)])
 end
@@ -280,6 +281,73 @@ function repl_getindex!(expr::Expr)
     return expr
 end
 
+function generate_exprs(prob::OptimizationProblem)
+    f = prob.f
+    if f.expr !== nothing
+        return f
+    end
+    pobj = prob.p
+    if pobj isa SciMLBase.NullParameters
+        pobj = Float64[]
+    end
+    @assert pobj isa Vector{<:Number} """
+    Unsupported parameter object type $(typeof(pobj)) for expression construction.
+ """ + @variables x[1:length(prob.u0)] p[1:length(pobj)] + obj = prob.f.f(collect(x), collect(p)) + obj_expr = SU.Code.toexpr(SU.expand(SU.unwrap(obj))) + symbolify!(obj_expr) + if prob.lcons === nothing && prob.ucons === nothing + return SciMLBase.remake(f; expr = obj_expr) + end + if SciMLBase.isinplace(prob) + cons_expr = zeros(Num, length(prob.lcons)) + prob.f.cons(cons_expr, collect(x), collect(p)) + else + cons_expr = prob.f.cons(collect(x), collect(p)) + end + cons_expr = SU.Code.toexpr.(SU.expand.(SU.unwrap.(cons_expr)))::Vector{Expr} + for i in eachindex(cons_expr) + cons_expr[i] = if prob.lcons[i] == prob.ucons[i] + Expr(:call, :(==), cons_expr[i], prob.lcons[i]) + elseif isinf(prob.lcons[i]) + Expr(:call, :(<=), cons_expr[i], prob.ucons[i]) + elseif isinf(prob.ucons[i]) + Expr(:call, :(>=), cons_expr[i], prob.lcons[i]) + else + Expr(:comparison, prob.lcons[i], :(<=), cons_expr[i], :(<=), prob.ucons[i]) + end + end + symbolify!(obj_expr) + symbolify!.(cons_expr) + newf = SciMLBase.remake(f; expr = obj_expr, cons_expr) + return newf +end + +function process_system_exprs(prob::OptimizationProblem, f::OptimizationFunction) + @assert f.sys !== nothing + expr_map = get_expr_map(prob.f.sys) + expr = convert_to_expr(f.expr, expr_map; expand_expr = false) + expr = repl_getindex!(expr) + cons = MTK.constraints(f.sys) + cons_expr = Vector{Expr}(undef, length(cons)) + Threads.@sync for i in eachindex(cons) + Threads.@spawn if prob.lcons[i] == prob.ucons[i] == 0 + cons_expr[i] = Expr(:call, :(==), + repl_getindex!(convert_to_expr(f.cons_expr[i], + expr_map; + expand_expr = false)), 0) + else + # MTK canonicalizes the expression form + cons_expr[i] = Expr(:call, :(<=), + repl_getindex!(convert_to_expr(f.cons_expr[i], + expr_map; + expand_expr = false)), 0) + end + end + return expr, cons_expr +end + include("nlp.jl") include("moi.jl") diff --git a/lib/OptimizationMOI/src/moi.jl b/lib/OptimizationMOI/src/moi.jl index 6f66fee6f..db298ec56 100644 --- a/lib/OptimizationMOI/src/moi.jl +++ b/lib/OptimizationMOI/src/moi.jl @@ -18,7 +18,8 @@ function MOIOptimizationCache(prob::OptimizationProblem, opt; kwargs...) if isnothing(f.sys) if f.adtype isa OptimizationBase.AutoSymbolics num_cons = prob.ucons === nothing ? 0 : length(prob.ucons) - f = OptimizationBase.instantiate_function(prob.f, + f = generate_exprs(prob) + f = OptimizationBase.instantiate_function(f, reinit_cache, prob.f.adtype, num_cons) @@ -28,34 +29,18 @@ function MOIOptimizationCache(prob::OptimizationProblem, opt; kwargs...) end # TODO: check if the problem is at most bilinear, i.e. 
affine and or quadratic terms in two variables - expr_map = get_expr_map(prob.f.sys) - expr = convert_to_expr(f.expr, expr_map; expand_expr = false) - expr = repl_getindex!(expr) - cons = MTK.constraints(f.sys) - cons_expr = Vector{Expr}(undef, length(cons)) - Threads.@sync for i in eachindex(cons) - Threads.@spawn if prob.lcons[i] == prob.ucons[i] == 0 - cons_expr[i] = Expr(:call, :(==), - repl_getindex!(convert_to_expr(f.cons_expr[i], - expr_map; - expand_expr = false)), 0) - else - # MTK canonicalizes the expression form - cons_expr[i] = Expr(:call, :(<=), - repl_getindex!(convert_to_expr(f.cons_expr[i], - expr_map; - expand_expr = false)), 0) - end + if f.sys !== nothing + expr, cons_expr = process_system_exprs(prob, f) + f = remake(f; expr, cons_expr) end - return MOIOptimizationCache(f, reinit_cache, prob.lb, prob.ub, prob.int, prob.sense, - expr, - cons_expr, + f.expr, + f.cons_expr, opt, NamedTuple(kwargs)) end diff --git a/lib/OptimizationMOI/src/nlp.jl b/lib/OptimizationMOI/src/nlp.jl index 57313dea6..74e552ae3 100644 --- a/lib/OptimizationMOI/src/nlp.jl +++ b/lib/OptimizationMOI/src/nlp.jl @@ -113,8 +113,9 @@ function MOIOptimizationNLPCache(prob::OptimizationProblem, num_cons = prob.ucons === nothing ? 0 : length(prob.ucons) if prob.f.adtype isa ADTypes.AutoSymbolics || (prob.f.adtype isa ADTypes.AutoSparse && prob.f.adtype.dense_ad isa ADTypes.AutoSymbolics) + f = generate_exprs(prob) f = OptimizationBase.instantiate_function( - prob.f, reinit_cache, prob.f.adtype, num_cons; + f, reinit_cache, prob.f.adtype, num_cons; g = true, h = true, cons_j = true, cons_h = true) else f = OptimizationBase.instantiate_function( @@ -179,16 +180,7 @@ function MOIOptimizationNLPCache(prob::OptimizationProblem, expr = obj_expr _cons_expr = cons_expr else - expr_map = get_expr_map(sys) - expr = convert_to_expr(obj_expr, expr_map; expand_expr = false) - expr = repl_getindex!(expr) - cons = MTK.constraints(sys) - _cons_expr = Vector{Expr}(undef, length(cons)) - for i in eachindex(cons) - _cons_expr[i] = repl_getindex!(convert_to_expr(cons_expr[i], - expr_map; - expand_expr = false)) - end + expr, _cons_expr = process_system_exprs(prob, f) end evaluator = MOIOptimizationNLPEvaluator(f, @@ -461,8 +453,12 @@ function MOI.objective_expr(evaluator::MOIOptimizationNLPEvaluator) end function MOI.constraint_expr(evaluator::MOIOptimizationNLPEvaluator, i) - # expr has the form f(x,p) == 0 or f(x,p) <= 0 - cons_expr = deepcopy(evaluator.cons_expr[i].args[2]) + cons_expr = evaluator.cons_expr[i] + cons_expr = if Meta.isexpr(cons_expr, :comparison) + deepcopy(cons_expr.args[3]) + else + deepcopy(cons_expr.args[2]) + end repl_getindex!(cons_expr) _replace_parameter_indices!(cons_expr, evaluator.p) _replace_variable_indices!(cons_expr) diff --git a/lib/OptimizationMOI/test/runtests.jl b/lib/OptimizationMOI/test/runtests.jl index 186fd59f9..c9f5113ac 100644 --- a/lib/OptimizationMOI/test/runtests.jl +++ b/lib/OptimizationMOI/test/runtests.jl @@ -1,4 +1,4 @@ -using OptimizationMOI, OptimizationBase, Ipopt, NLopt, Zygote, ModelingToolkit, ReverseDiff +using OptimizationMOI, OptimizationBase, Ipopt, NLopt, Zygote, ModelingToolkitBase, ReverseDiff using AmplNLWriter, Ipopt_jll, Juniper, HiGHS, MathOptInterface using Test, SparseArrays @@ -226,7 +226,7 @@ end @testset "MOI" begin @parameters c = 0.0 - @variables x[1:2]=[0.0, 0.0] [bounds = (c, Inf)] + @variables x[1:2]=[0.0, 0.0] [bounds = ([c, c], [Inf, Inf])] @parameters a = 3.0 @parameters b = 4.0 @parameters d = 2.0 diff --git 
a/lib/OptimizationMadNLP/Project.toml b/lib/OptimizationMadNLP/Project.toml index 9f5db8288..87e83cd6f 100644 --- a/lib/OptimizationMadNLP/Project.toml +++ b/lib/OptimizationMadNLP/Project.toml @@ -18,7 +18,7 @@ DifferentiationInterface = "0.7" ForwardDiff = "1.2.1" LinearAlgebra = "1.10.0" MadNLP = "0.8.12" -ModelingToolkit = "10.23" +ModelingToolkit = "11" NLPModels = "0.21.5" OptimizationBase = "4.0.2" Reexport = "1.2" diff --git a/lib/OptimizationOptimJL/test/runtests.jl b/lib/OptimizationOptimJL/test/runtests.jl index 6d45be085..21d8fc70a 100644 --- a/lib/OptimizationOptimJL/test/runtests.jl +++ b/lib/OptimizationOptimJL/test/runtests.jl @@ -163,7 +163,7 @@ end @test 10 * sol.objective < l1 optprob = OptimizationFunction(rosenbrock, - OptimizationBase.AutoModelingToolkit(true, false)) + OptimizationBase.AutoSparse(OptimizationBase.AutoSymbolics())) prob = OptimizationProblem(optprob, x0, _p) sol = solve(prob, Optim.Newton()) @test 10 * sol.objective < l1
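
Usage note (illustrative, not part of the patch): with `AutoModelingToolkit` removed from OptimizationBase, the symbolic AD path is now selected through the ADTypes choices `AutoSymbolics()` (dense) and `AutoSparse(AutoSymbolics())` (sparse), as the OptimizationOptimJL test change above shows. A minimal sketch of the replacement usage, assuming the standard Optimization.jl API; the Rosenbrock objective, starting point, and solver below mirror the test suite and are only examples, and exact package loading requirements may vary:

```julia
using OptimizationBase, OptimizationOptimJL

# Objective in the two-argument (x, p) form expected by OptimizationFunction.
rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
x0 = zeros(2)
p = [1.0, 100.0]

# Dense symbolic derivatives (previously AutoModelingToolkit(false, false)):
optf_dense = OptimizationFunction(rosenbrock, OptimizationBase.AutoSymbolics())

# Sparse symbolic derivatives (previously AutoModelingToolkit(true, false)):
optf_sparse = OptimizationFunction(rosenbrock,
    OptimizationBase.AutoSparse(OptimizationBase.AutoSymbolics()))

prob = OptimizationProblem(optf_sparse, x0, p)
sol = solve(prob, Optim.Newton())  # Newton can use the symbolically generated Hessian
```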