From 0927d5e4713a20b803e622a147c8bf3849198c0f Mon Sep 17 00:00:00 2001 From: franckgaga Date: Fri, 18 Jul 2025 17:41:09 -0400 Subject: [PATCH 01/17] wip: trying new `Ipopt._VectorNonlinearOracle` exp. feature --- src/controller/nonlinmpc.jl | 68 +++++++++++++++++++++++++++++++++++-- 1 file changed, 65 insertions(+), 3 deletions(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 5b5d2050a..08442d073 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -498,7 +498,9 @@ end Init the nonlinear optimization for [`NonLinMPC`](@ref) controllers. """ -function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel) +function init_optimization!( + mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel{JNT} +) where JNT<:Real # --- variables and linear constraints --- con, transcription = mpc.con, mpc.transcription nZ̃ = length(mpc.Z̃) @@ -527,8 +529,68 @@ function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.Generic ) @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!) @objective(optim, Min, J(Z̃var...)) - init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!) - set_nonlincon!(mpc, model, transcription, optim) + if JuMP.solver_name(optim) ≠ "Ipopt" + init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!) + set_nonlincon!(mpc, model, transcription, optim) + else + # Test new experimental feature: + + + J::Vector{JNT} = zeros(JNT, 1) + ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) + x̂0end::Vector{JNT} = zeros(JNT, nx̂) + K0::Vector{JNT} = zeros(JNT, nK) + Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe) + U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) + Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) + gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) + geq::Vector{JNT} = zeros(JNT, neq) + + geq_min = zeros(JNT, mpc.con.neq) + geq_max = zeros(JNT, mpc.con.neq) + + + function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇geq_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(g) + ) + ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) + ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) + + function geqfunc_set!(geq, Z̃) + return geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) + end + function ∇geqfunc_set!(∇geq, Z̃) + value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃, ∇geq_context...) + return nothing + end + + #= + # Langragian of the optimization problem: + function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ) + L = J + dot(μ, geq) + return L + end + =# + + + set = Ipopt._VectorNonlinearOracle(; + mpc.con.neq, + geq_min, + geq_max, + geqfunc_set!, + jacobian_structure, + ∇geqfunc_set! + ) + end return nothing end From 888303432746b78908b2f598917f699f38e02519 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Sat, 19 Jul 2025 15:58:16 -0400 Subject: [PATCH 02/17] wip: starting to work! 
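For reference, the oracle interface this patch settles on, in a self-contained form.
This is only a sketch: `Ipopt._VectorNonlinearOracle` is an experimental, unexported
API that may change, the keyword names simply mirror the diffs below, and the toy
constraint x1^2 + x2^2 <= 1 is not part of the controller code.

    using JuMP, Ipopt

    # toy constraint g(x) = x1^2 + x2^2 - 1 <= 0, purely illustrative
    g_oracle!(ret, x)  = (ret[1] = x[1]^2 + x[2]^2 - 1; nothing)    # eval_f: in-place values
    ∇g_oracle!(ret, x) = (ret[1] = 2x[1]; ret[2] = 2x[2]; nothing)  # eval_jacobian: nonzeros,
                                                                    # ordered like jacobian_structure
    oracle = Ipopt._VectorNonlinearOracle(;
        dimension = 2,                          # length of the decision vector seen by the oracle
        l = [-Inf],                             # lower bound on g(x)
        u = [0.0],                              # upper bound on g(x)
        eval_f = g_oracle!,
        jacobian_structure = [(1, 1), (1, 2)],  # (row, col) of the Jacobian nonzeros
        eval_jacobian = ∇g_oracle!,
    )

    model = Model(Ipopt.Optimizer)
    # no Hessian callbacks are provided above, so a quasi-Newton approximation is
    # presumably required:
    set_attribute(model, "hessian_approximation", "limited-memory")
    @variable(model, x[1:2])
    @objective(model, Min, (x[1] - 1)^2 + (x[2] - 1)^2)
    @constraint(model, x in oracle)             # same pattern as `Z̃var in set` below
    optimize!(model)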
--- src/controller/nonlinmpc.jl | 29 +++++++++++++++++++++-------- src/general.jl | 4 ++-- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 08442d073..f0217d893 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -535,8 +535,16 @@ function init_optimization!( else # Test new experimental feature: + model = mpc.estim.model + grad, jac = mpc.gradient, mpc.jacobian + nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk + Hp, Hc = mpc.Hp, mpc.Hc + ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq + nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk + nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny + strict = Val(true) + myNaN = convert(JNT, NaN) - J::Vector{JNT} = zeros(JNT, 1) ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) x̂0end::Vector{JNT} = zeros(JNT, nx̂) K0::Vector{JNT} = zeros(JNT, nK) @@ -566,11 +574,15 @@ function init_optimization!( function geqfunc_set!(geq, Z̃) return geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) end - function ∇geqfunc_set!(∇geq, Z̃) + function ∇geqfunc_set!(∇geq_vec, Z̃) value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃, ∇geq_context...) + ∇geq_vec .= nonzeros(∇geq) return nothing end + I_∇geq, J_∇geq = SparseArrays.findnz(∇geq) + ∇geq_structure = collect(zip(I_∇geq, J_∇geq)) + #= # Langragian of the optimization problem: function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) @@ -583,13 +595,14 @@ function init_optimization!( set = Ipopt._VectorNonlinearOracle(; - mpc.con.neq, - geq_min, - geq_max, - geqfunc_set!, - jacobian_structure, - ∇geqfunc_set! + dimension = nZ̃, + l = geq_min, + u = geq_max, + eval_f = geqfunc_set!, + jacobian_structure = ∇geq_structure, + eval_jacobian = ∇geqfunc_set! ) + @constraint(optim, Z̃var in set) end return nothing end diff --git a/src/general.jl b/src/general.jl index f5e8ecb95..127707184 100644 --- a/src/general.jl +++ b/src/general.jl @@ -58,8 +58,8 @@ function limit_solve_time(optim::GenericModel, Ts) end "Init a differentiation result matrix as dense or sparse matrix, as required by `backend`." -init_diffmat(T, backend::AbstractADType, _ , nx , ny) = Matrix{T}(undef, ny, nx) -init_diffmat(T, backend::AutoSparse ,prep , _ , _ ) = similar(sparsity_pattern(prep), T) +init_diffmat(T, ::AbstractADType, _ , nx , ny) = Matrix{T}(undef, ny, nx) +init_diffmat(T, ::AutoSparse ,prep , _ , _ ) = similar(sparsity_pattern(prep), T) "Verify that x and y elements are different using `!==`." 
isdifferent(x, y) = any(xi !== yi for (xi, yi) in zip(x, y)) From 9157b8a208c4b74032b93fb57a346a044a4e113b Mon Sep 17 00:00:00 2001 From: franckgaga Date: Mon, 21 Jul 2025 15:36:17 -0400 Subject: [PATCH 03/17] wip: idem --- src/controller/construct.jl | 2 +- src/controller/nonlinmpc.jl | 54 +++++++++++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/src/controller/construct.jl b/src/controller/construct.jl index 711cb3040..34e2657cd 100644 --- a/src/controller/construct.jl +++ b/src/controller/construct.jl @@ -434,7 +434,7 @@ function setconstraint!( JuMP.delete(optim, optim[:linconstraint]) JuMP.unregister(optim, :linconstraint) @constraint(optim, linconstraint, A*Z̃var .≤ b) - set_nonlincon!(mpc, model, transcription, optim) + # set_nonlincon!(mpc, model, transcription, optim) else i_b, i_g = init_matconstraint_mpc( model, transcription, nc, diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index f0217d893..30d5f6db6 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -536,7 +536,7 @@ function init_optimization!( # Test new experimental feature: model = mpc.estim.model - grad, jac = mpc.gradient, mpc.jacobian + jac = mpc.jacobian nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk Hp, Hc = mpc.Hp, mpc.Hc ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq @@ -544,6 +544,7 @@ function init_optimization!( nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny strict = Val(true) myNaN = convert(JNT, NaN) + myInf = convert(JNT, Inf) ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) x̂0end::Vector{JNT} = zeros(JNT, nx̂) @@ -554,9 +555,47 @@ function init_optimization!( gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) geq::Vector{JNT} = zeros(JNT, neq) - geq_min = zeros(JNT, mpc.con.neq) - geq_max = zeros(JNT, mpc.con.neq) + function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇g_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(geq), + ) + # temporarily enable all the inequality constraints for sparsity detection: + mpc.con.i_g[1:end-nc] .= true + ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict) + mpc.con.i_g[1:end-nc] .= false + ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) + + + function gfunc_set!(g, Z̃) + return gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + end + function ∇gfunc_set!(∇g_vec, Z̃) + value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃, ∇g_context...) + ∇g_vec .= nonzeros(∇g) + return nothing + end + + g_min = fill(-myInf, ng) + g_max = zeros(JNT, ng) + + I_∇g, J_∇g = SparseArrays.findnz(∇g) + ∇g_structure = collect(zip(I_∇g, J_∇g)) + g_set = Ipopt._VectorNonlinearOracle(; + dimension = nZ̃, + l = g_min, + u = g_max, + eval_f = gfunc_set!, + jacobian_structure = ∇g_structure, + eval_jacobian = ∇gfunc_set! 
+ ) + @constraint(optim, Z̃var in g_set) function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) @@ -580,6 +619,10 @@ function init_optimization!( return nothing end + + geq_min = zeros(JNT, mpc.con.neq) + geq_max = zeros(JNT, mpc.con.neq) + I_∇geq, J_∇geq = SparseArrays.findnz(∇geq) ∇geq_structure = collect(zip(I_∇geq, J_∇geq)) @@ -594,7 +637,7 @@ function init_optimization!( =# - set = Ipopt._VectorNonlinearOracle(; + geq_set = Ipopt._VectorNonlinearOracle(; dimension = nZ̃, l = geq_min, u = geq_max, @@ -602,7 +645,8 @@ function init_optimization!( jacobian_structure = ∇geq_structure, eval_jacobian = ∇geqfunc_set! ) - @constraint(optim, Z̃var in set) + @constraint(optim, Z̃var in geq_set) + end return nothing end From 4c898c95f5f006c863fc8731bc9958a9dc17094b Mon Sep 17 00:00:00 2001 From: franckgaga Date: Tue, 22 Jul 2025 14:22:30 -0400 Subject: [PATCH 04/17] wip: idem --- src/controller/construct.jl | 5 +++- src/controller/nonlinmpc.jl | 47 ++++++++++++++++++++++++++----------- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/src/controller/construct.jl b/src/controller/construct.jl index 34e2657cd..62977b34e 100644 --- a/src/controller/construct.jl +++ b/src/controller/construct.jl @@ -434,7 +434,10 @@ function setconstraint!( JuMP.delete(optim, optim[:linconstraint]) JuMP.unregister(optim, :linconstraint) @constraint(optim, linconstraint, A*Z̃var .≤ b) - # set_nonlincon!(mpc, model, transcription, optim) + # TODO: change this + if JuMP.solver_name(optim) ≠ "Ipopt" + set_nonlincon!(mpc, model, transcription, optim) + end else i_b, i_g = init_matconstraint_mpc( model, transcription, nc, diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 30d5f6db6..7bb118996 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -555,6 +555,9 @@ function init_optimization!( gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) geq::Vector{JNT} = zeros(JNT, neq) + + + function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) return nothing @@ -572,17 +575,23 @@ function init_optimization!( ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) - function gfunc_set!(g, Z̃) - return gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇g) + Z̃_∇g .= Z̃_arg + value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) + end end - function ∇gfunc_set!(∇g_vec, Z̃) - value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃, ∇g_context...) 
- ∇g_vec .= nonzeros(∇g) - return nothing + function gfunc_set!(g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + return g_arg .= g + end + function ∇gfunc_set!(∇g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + return ∇g_arg .= nonzeros(∇g) end g_min = fill(-myInf, ng) - g_max = zeros(JNT, ng) + g_max = fill(+myInf, ng) I_∇g, J_∇g = SparseArrays.findnz(∇g) ∇g_structure = collect(zip(I_∇g, J_∇g)) @@ -597,6 +606,10 @@ function init_optimization!( ) @constraint(optim, Z̃var in g_set) + + + + function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) return nothing @@ -610,15 +623,21 @@ function init_optimization!( ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) - function geqfunc_set!(geq, Z̃) - return geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) + + function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇geq) + Z̃_∇geq .= Z̃_arg + value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) + end end - function ∇geqfunc_set!(∇geq_vec, Z̃) - value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃, ∇geq_context...) - ∇geq_vec .= nonzeros(∇geq) - return nothing + function geqfunc_set!(geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + return geq_arg .= geq + end + function ∇geqfunc_set!(∇geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + return ∇geq_arg .= nonzeros(∇geq) end - geq_min = zeros(JNT, mpc.con.neq) geq_max = zeros(JNT, mpc.con.neq) From 9b9dd6c33f098d24645fbff7fd97081070cf0b33 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 09:16:27 -0400 Subject: [PATCH 05/17] removed: useless variable in `MultipleShooting` and `NonLinModel` --- src/controller/transcription.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/controller/transcription.jl b/src/controller/transcription.jl index ba9f92103..0ed7b6860 100644 --- a/src/controller/transcription.jl +++ b/src/controller/transcription.jl @@ -1173,7 +1173,7 @@ custom constraints are include in the `g` vector. function con_nonlinprog!( g, mpc::PredictiveController, ::NonLinModel, ::TranscriptionMethod, x̂0end, Ŷ0, gc, ϵ ) - nx̂, nŶ = length(x̂0end), length(Ŷ0) + nŶ = length(Ŷ0) for i in eachindex(g) mpc.con.i_g[i] || continue if i ≤ nŶ From 4c97648517fab7da15a7d4c75875fd4f5b71f2a3 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 11:12:20 -0400 Subject: [PATCH 06/17] wip: idem --- src/controller/construct.jl | 2 +- src/controller/nonlinmpc.jl | 26 +++++++++++++------------- src/general.jl | 18 ++++++++++++++++-- 3 files changed, 30 insertions(+), 16 deletions(-) diff --git a/src/controller/construct.jl b/src/controller/construct.jl index 62977b34e..a28bdacf3 100644 --- a/src/controller/construct.jl +++ b/src/controller/construct.jl @@ -434,7 +434,7 @@ function setconstraint!( JuMP.delete(optim, optim[:linconstraint]) JuMP.unregister(optim, :linconstraint) @constraint(optim, linconstraint, A*Z̃var .≤ b) - # TODO: change this + # TODO: change this !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
if JuMP.solver_name(optim) ≠ "Ipopt" set_nonlincon!(mpc, model, transcription, optim) end diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 7bb118996..d0dcf5d01 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -533,7 +533,7 @@ function init_optimization!( init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!) set_nonlincon!(mpc, model, transcription, optim) else - # Test new experimental feature: + # ========= Test new experimental feature ======================================== model = mpc.estim.model jac = mpc.jacobian @@ -580,21 +580,23 @@ function init_optimization!( Z̃_∇g .= Z̃_arg value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) end + return nothing end function gfunc_set!(g_arg, Z̃_arg) update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - return g_arg .= g + g_arg .= g + return nothing end function ∇gfunc_set!(∇g_arg, Z̃_arg) update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - return ∇g_arg .= nonzeros(∇g) + diffmat2vec!(∇g_arg, ∇g) + return nothing end g_min = fill(-myInf, ng) g_max = fill(+myInf, ng) - I_∇g, J_∇g = SparseArrays.findnz(∇g) - ∇g_structure = collect(zip(I_∇g, J_∇g)) + ∇g_structure = init_diffstructure(∇g) g_set = Ipopt._VectorNonlinearOracle(; dimension = nZ̃, @@ -606,10 +608,6 @@ function init_optimization!( ) @constraint(optim, Z̃var in g_set) - - - - function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) return nothing @@ -629,21 +627,23 @@ function init_optimization!( Z̃_∇geq .= Z̃_arg value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) end + return nothing end function geqfunc_set!(geq_arg, Z̃_arg) update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - return geq_arg .= geq + geq_arg .= geq + return nothing end function ∇geqfunc_set!(∇geq_arg, Z̃_arg) update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - return ∇geq_arg .= nonzeros(∇geq) + diffmat2vec!(∇geq_arg, ∇geq) + return nothing end geq_min = zeros(JNT, mpc.con.neq) geq_max = zeros(JNT, mpc.con.neq) - I_∇geq, J_∇geq = SparseArrays.findnz(∇geq) - ∇geq_structure = collect(zip(I_∇geq, J_∇geq)) + ∇geq_structure = init_diffstructure(∇geq) #= # Langragian of the optimization problem: diff --git a/src/general.jl b/src/general.jl index 127707184..0cd4849da 100644 --- a/src/general.jl +++ b/src/general.jl @@ -58,8 +58,22 @@ function limit_solve_time(optim::GenericModel, Ts) end "Init a differentiation result matrix as dense or sparse matrix, as required by `backend`." -init_diffmat(T, ::AbstractADType, _ , nx , ny) = Matrix{T}(undef, ny, nx) -init_diffmat(T, ::AutoSparse ,prep , _ , _ ) = similar(sparsity_pattern(prep), T) +init_diffmat(T, ::AbstractADType, _ , nx, ny) = zeros(T, ny, nx) +function init_diffmat(T, ::AutoSparse, prep , _ , _ ) + A = similar(sparsity_pattern(prep), T) + return A .= 0 +end + +"Init the sparsity structure of matrix `A` as required by `JuMP.jl`." +function init_diffstructure(A::AbstractSparseMatrix) + I, J = findnz(A) + return collect(zip(I, J)) +end +init_diffstructure(A::AbstractMatrix)= Tuple.(CartesianIndices(A))[:] + +"Store the differentiation matrix `A` in the vector `v` as required by `JuMP.jl.`" +diffmat2vec!(v::AbstractVector, A::AbstractSparseMatrix) = v .= nonzeros(A) +diffmat2vec!(v::AbstractVector, A::AbstractMatrix) = v[:] = A "Verify that x and y elements are different using `!==`." 
isdifferent(x, y) = any(xi !== yi for (xi, yi) in zip(x, y)) From 34d818b349d184f4c99623533dfc62f332005e79 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 12:16:28 -0400 Subject: [PATCH 07/17] wip: added support for custom constraints `gc` --- src/controller/nonlinmpc.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index d0dcf5d01..4fdb1502c 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -595,6 +595,7 @@ function init_optimization!( g_min = fill(-myInf, ng) g_max = fill(+myInf, ng) + g_max[end-nc+1:end] .= 0 # custom constraints, if any, are always upper bounded ∇g_structure = init_diffstructure(∇g) From d4eb3f2262fe7e0e0e97b82a4bdff7190e2f689e Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 17:01:28 -0400 Subject: [PATCH 08/17] =?UTF-8?q?wip:=20shrink=20`g=5Fmin`=20and=20`g=5Fma?= =?UTF-8?q?x`=20when=20there=20is=20=C2=B1Inf=20bounds?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/controller/nonlinmpc.jl | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 4fdb1502c..529b2d3fe 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -557,7 +557,8 @@ function init_optimization!( - + # TODO: transfer all the following in set_nonlincon!, including a copy-paste + # of all the vectors above. function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) return nothing @@ -584,20 +585,19 @@ function init_optimization!( end function gfunc_set!(g_arg, Z̃_arg) update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - g_arg .= g + g_arg .= @views g[mpc.con.i_g] return nothing end function ∇gfunc_set!(∇g_arg, Z̃_arg) update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - diffmat2vec!(∇g_arg, ∇g) + diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :]) return nothing end - g_min = fill(-myInf, ng) - g_max = fill(+myInf, ng) - g_max[end-nc+1:end] .= 0 # custom constraints, if any, are always upper bounded + g_min = fill(-myInf, sum(mpc.con.i_g)) + g_max = zeros(JNT, sum(mpc.con.i_g)) - ∇g_structure = init_diffstructure(∇g) + ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) g_set = Ipopt._VectorNonlinearOracle(; dimension = nZ̃, @@ -609,6 +609,8 @@ function init_optimization!( ) @constraint(optim, Z̃var in g_set) + + function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) return nothing From e2afbe6151714617db12ed4038902d70dcd8b826 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 17:32:53 -0400 Subject: [PATCH 09/17] wip: re-create set in `setconstraint!` when needed --- src/controller/construct.jl | 2 + src/controller/nonlinmpc.jl | 256 +++++++++++++++++++----------------- 2 files changed, 136 insertions(+), 122 deletions(-) diff --git a/src/controller/construct.jl b/src/controller/construct.jl index a28bdacf3..15c64259e 100644 --- a/src/controller/construct.jl +++ b/src/controller/construct.jl @@ -437,6 +437,8 @@ function setconstraint!( # TODO: change this !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
if JuMP.solver_name(optim) ≠ "Ipopt" set_nonlincon!(mpc, model, transcription, optim) + else + set_nonlincon_exp(mpc, optim) end else i_b, i_g = init_matconstraint_mpc( diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 529b2d3fe..2c4f64637 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -533,143 +533,155 @@ function init_optimization!( init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!) set_nonlincon!(mpc, model, transcription, optim) else - # ========= Test new experimental feature ======================================== - - model = mpc.estim.model - jac = mpc.jacobian - nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk - Hp, Hc = mpc.Hp, mpc.Hc - ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq - nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk - nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny - strict = Val(true) - myNaN = convert(JNT, NaN) - myInf = convert(JNT, Inf) - - ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) - x̂0end::Vector{JNT} = zeros(JNT, nx̂) - K0::Vector{JNT} = zeros(JNT, nK) - Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe) - U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) - Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) - gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) - geq::Vector{JNT} = zeros(JNT, neq) - - - - # TODO: transfer all the following in set_nonlincon!, including a copy-paste - # of all the vectors above. - function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - return nothing - end - Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call - ∇g_context = ( - Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), - Cache(Û0), Cache(K0), Cache(X̂0), - Cache(gc), Cache(geq), - ) - # temporarily enable all the inequality constraints for sparsity detection: - mpc.con.i_g[1:end-nc] .= true - ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict) - mpc.con.i_g[1:end-nc] .= false - ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) + set_nonlincon_exp(mpc, optim) + end + return nothing +end +# TODO: cleanup this function, this is super dirty +function set_nonlincon_exp(mpc::PredictiveController, optim::JuMP.GenericModel{JNT}) where JNT<:Real + # ========= Test new experimental feature ======================================== - function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - if isdifferent(Z̃_arg, Z̃_∇g) - Z̃_∇g .= Z̃_arg - value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) - end - return nothing - end - function gfunc_set!(g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - g_arg .= @views g[mpc.con.i_g] - return nothing - end - function ∇gfunc_set!(∇g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :]) - return nothing - end + nonlin_constraints = JuMP.all_constraints(optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle) + map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints) - g_min = fill(-myInf, sum(mpc.con.i_g)) - g_max = zeros(JNT, sum(mpc.con.i_g)) - ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) + Z̃var = optim[:Z̃var] - g_set = Ipopt._VectorNonlinearOracle(; - dimension = nZ̃, - l = g_min, - u = g_max, - eval_f = gfunc_set!, - jacobian_structure = ∇g_structure, - eval_jacobian = ∇gfunc_set! 
- ) - @constraint(optim, Z̃var in g_set) + model = mpc.estim.model + jac = mpc.jacobian + nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk + Hp, Hc = mpc.Hp, mpc.Hc + ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq + nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk + nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny + strict = Val(true) + myNaN = convert(JNT, NaN) + myInf = convert(JNT, Inf) - - function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - return nothing - end - Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call - ∇geq_context = ( - Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), - Cache(Û0), Cache(K0), Cache(X̂0), - Cache(gc), Cache(g) - ) - ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) - ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) + ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) + x̂0end::Vector{JNT} = zeros(JNT, nx̂) + K0::Vector{JNT} = zeros(JNT, nK) + Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe) + U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) + Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) + gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) + geq::Vector{JNT} = zeros(JNT, neq) - function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - if isdifferent(Z̃_arg, Z̃_∇geq) - Z̃_∇geq .= Z̃_arg - value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) - end - return nothing - end - function geqfunc_set!(geq_arg, Z̃_arg) - update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - geq_arg .= geq - return nothing - end - function ∇geqfunc_set!(∇geq_arg, Z̃_arg) - update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - diffmat2vec!(∇geq_arg, ∇geq) - return nothing + + # TODO: transfer all the following in set_nonlincon!, including a copy-paste + # of all the vectors above. + function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇g_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(geq), + ) + # temporarily enable all the inequality constraints for sparsity detection: + mpc.con.i_g[1:end-nc] .= true + ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict) + mpc.con.i_g[1:end-nc] .= false + ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) + + + function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇g) + Z̃_∇g .= Z̃_arg + value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) 
end + return nothing + end + function gfunc_set!(g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + g_arg .= @views g[mpc.con.i_g] + return nothing + end + function ∇gfunc_set!(∇g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :]) + return nothing + end - geq_min = zeros(JNT, mpc.con.neq) - geq_max = zeros(JNT, mpc.con.neq) + g_min = fill(-myInf, sum(mpc.con.i_g)) + g_max = zeros(JNT, sum(mpc.con.i_g)) + + ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) + + g_set = Ipopt._VectorNonlinearOracle(; + dimension = nZ̃, + l = g_min, + u = g_max, + eval_f = gfunc_set!, + jacobian_structure = ∇g_structure, + eval_jacobian = ∇gfunc_set! + ) + @constraint(optim, Z̃var in g_set) - ∇geq_structure = init_diffstructure(∇geq) - #= - # Langragian of the optimization problem: - function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ) - L = J + dot(μ, geq) - return L + + function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇geq_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(g) + ) + ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) + ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) + + + function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇geq) + Z̃_∇geq .= Z̃_arg + value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) end - =# - - - geq_set = Ipopt._VectorNonlinearOracle(; - dimension = nZ̃, - l = geq_min, - u = geq_max, - eval_f = geqfunc_set!, - jacobian_structure = ∇geq_structure, - eval_jacobian = ∇geqfunc_set! - ) - @constraint(optim, Z̃var in geq_set) + return nothing + end + function geqfunc_set!(geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + geq_arg .= geq + return nothing + end + function ∇geqfunc_set!(∇geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + diffmat2vec!(∇geq_arg, ∇geq) + return nothing + end - end + geq_min = zeros(JNT, mpc.con.neq) + geq_max = zeros(JNT, mpc.con.neq) + + ∇geq_structure = init_diffstructure(∇geq) + + #= + # Langragian of the optimization problem: + function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ) + L = J + dot(μ, geq) + return L + end + =# + + + geq_set = Ipopt._VectorNonlinearOracle(; + dimension = nZ̃, + l = geq_min, + u = geq_max, + eval_f = geqfunc_set!, + jacobian_structure = ∇geq_structure, + eval_jacobian = ∇geqfunc_set! + ) + @constraint(optim, Z̃var in geq_set) return nothing end From de25fcff70432705805b80eab19a62319b929e34 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 18:11:32 -0400 Subject: [PATCH 10/17] debug: new exp. 
set only for `NonLinMPC` --- src/controller/nonlinmpc.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 2c4f64637..d8337f185 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -538,8 +538,9 @@ function init_optimization!( return nothing end +set_nonlincon!(::PredictiveController, _ ) = nothing # TODO: cleanup this function, this is super dirty -function set_nonlincon_exp(mpc::PredictiveController, optim::JuMP.GenericModel{JNT}) where JNT<:Real +function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real # ========= Test new experimental feature ======================================== nonlin_constraints = JuMP.all_constraints(optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle) From e181a34d8dc7c27b26cf1e001c0c9ff4928043bc Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 18:16:41 -0400 Subject: [PATCH 11/17] debug: idem --- src/controller/nonlinmpc.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index d8337f185..818d777f9 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -538,7 +538,7 @@ function init_optimization!( return nothing end -set_nonlincon!(::PredictiveController, _ ) = nothing +set_nonlincon!(::PredictiveController, ::JuMP.GenericModel) = nothing # TODO: cleanup this function, this is super dirty function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real # ========= Test new experimental feature ======================================== From 1ee88f16f4748e78c2e46ce66bf65519d97e6fba Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 18:19:25 -0400 Subject: [PATCH 12/17] debug: idem --- src/controller/nonlinmpc.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 818d777f9..6360d8291 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -539,6 +539,8 @@ function init_optimization!( end set_nonlincon!(::PredictiveController, ::JuMP.GenericModel) = nothing +set_nonlincon!(::LinMPC, ::JuMP.GenericModel) = nothing +set_nonlincon!(::ExplicitMPC, ::JuMP.GenericModel) = nothing # TODO: cleanup this function, this is super dirty function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real # ========= Test new experimental feature ======================================== From 5bcd0377b271c8f7d4a47b3ac580b3be221c500f Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 18:20:07 -0400 Subject: [PATCH 13/17] debug : idem --- src/controller/nonlinmpc.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 6360d8291..62a7accc4 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -538,9 +538,7 @@ function init_optimization!( return nothing end -set_nonlincon!(::PredictiveController, ::JuMP.GenericModel) = nothing -set_nonlincon!(::LinMPC, ::JuMP.GenericModel) = nothing -set_nonlincon!(::ExplicitMPC, ::JuMP.GenericModel) = nothing +set_nonlincon_exp(::PredictiveController, ::JuMP.GenericModel) = nothing # TODO: cleanup this function, this is super dirty function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real # ========= Test new experimental feature ======================================== From 
754b5f5f7cd430b538108f82d04c7e53c90bf005 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Wed, 23 Jul 2025 21:01:16 -0400 Subject: [PATCH 14/17] wip: debug --- src/controller/nonlinmpc.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 62a7accc4..911c728cc 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -603,9 +603,11 @@ function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where g_arg .= @views g[mpc.con.i_g] return nothing end + ∇g_i_g = ∇g[mpc.con.i_g, :] function ∇gfunc_set!(∇g_arg, Z̃_arg) update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :]) + ∇g_i_g .= @views ∇g[mpc.con.i_g, :] + diffmat2vec!(∇g_arg, ∇g_i_g) return nothing end From 297314e6d6768ce133104ea9a99d0497b99ae727 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Thu, 16 Oct 2025 12:37:24 -0400 Subject: [PATCH 15/17] changed: cleanup in new nonlinear oracle code --- src/controller/construct.jl | 10 +- src/controller/nonlinmpc.jl | 371 +++++++++++++++++--------------- src/controller/transcription.jl | 3 - 3 files changed, 202 insertions(+), 182 deletions(-) diff --git a/src/controller/construct.jl b/src/controller/construct.jl index 5df3b2a14..fce6470e4 100644 --- a/src/controller/construct.jl +++ b/src/controller/construct.jl @@ -439,11 +439,11 @@ function setconstraint!( JuMP.delete(optim, optim[:linconstraint]) JuMP.unregister(optim, :linconstraint) @constraint(optim, linconstraint, A*Z̃var .≤ b) - # TODO: change this !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! if JuMP.solver_name(optim) ≠ "Ipopt" set_nonlincon!(mpc, model, transcription, optim) else - set_nonlincon_exp(mpc, optim) + g_oracle, geq_oracle = get_nonlinops(mpc, optim) + set_nonlincon_exp!(mpc, transcription, g_oracle, geq_oracle) end else i_b, i_g = init_matconstraint_mpc( @@ -458,6 +458,12 @@ function setconstraint!( return mpc end +"By default, no nonlinear operators, return 4 nothing" +get_nonlinops(::PredictiveController, _ ) = (nothing, nothing, nothing, nothing) + +"By default, no nonlinear constraints, return nothing." +set_nonlincon_exp!(::PredictiveController, ::TranscriptionMethod, _ , _) = nothing + """ default_Hp(model::LinModel) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 39a9dd765..6057dc8bb 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -540,181 +540,37 @@ function init_optimization!( JuMP.set_attribute(optim, "nlp_scaling_max_gradient", 10.0/C) end end - Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! = get_optim_functions( - mpc, optim - ) - @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!) - @objective(optim, Min, J(Z̃var...)) if JuMP.solver_name(optim) ≠ "Ipopt" - init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!) + # everything with the splatting syntax: + J_func, ∇J_func!, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs! = get_optim_functions( + mpc, optim + ) + else + # constraints with vector nonlinear oracle, objective function with splatting: + g_oracle, geq_oracle, J_func, ∇J_func! = get_nonlinops(mpc, optim) + end + @operator(optim, J_op, nZ̃, J_func, ∇J_func!) + @objective(optim, Min, J_op(Z̃var...)) + if JuMP.solver_name(optim) ≠ "Ipopt" + init_nonlincon!(mpc, model, transcription, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs!) 
set_nonlincon!(mpc, model, transcription, optim) else - set_nonlincon_exp(mpc, optim) + set_nonlincon_exp!(mpc, transcription, g_oracle, geq_oracle) end return nothing end -set_nonlincon_exp(::PredictiveController, ::JuMP.GenericModel) = nothing -# TODO: cleanup this function, this is super dirty -function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real - # ========= Test new experimental feature ======================================== - - nonlin_constraints = JuMP.all_constraints(optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle) - map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints) - - - Z̃var = optim[:Z̃var] - - model = mpc.estim.model - jac = mpc.jacobian - nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk - Hp, Hc = mpc.Hp, mpc.Hc - ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq - nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk - nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny - strict = Val(true) - myNaN = convert(JNT, NaN) - myInf = convert(JNT, Inf) - - - ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) - x̂0end::Vector{JNT} = zeros(JNT, nx̂) - K0::Vector{JNT} = zeros(JNT, nK) - Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe) - U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) - Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) - gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) - geq::Vector{JNT} = zeros(JNT, neq) - - - - # TODO: transfer all the following in set_nonlincon!, including a copy-paste - # of all the vectors above. - function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - return nothing - end - Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call - ∇g_context = ( - Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), - Cache(Û0), Cache(K0), Cache(X̂0), - Cache(gc), Cache(geq), - ) - # temporarily enable all the inequality constraints for sparsity detection: - mpc.con.i_g[1:end-nc] .= true - ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict) - mpc.con.i_g[1:end-nc] .= false - ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) - - - function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - if isdifferent(Z̃_arg, Z̃_∇g) - Z̃_∇g .= Z̃_arg - value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) - end - return nothing - end - function gfunc_set!(g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - g_arg .= @views g[mpc.con.i_g] - return nothing - end - ∇g_i_g = ∇g[mpc.con.i_g, :] - function ∇gfunc_set!(∇g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - ∇g_i_g .= @views ∇g[mpc.con.i_g, :] - diffmat2vec!(∇g_arg, ∇g_i_g) - return nothing - end - - g_min = fill(-myInf, sum(mpc.con.i_g)) - g_max = zeros(JNT, sum(mpc.con.i_g)) - - ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) - - g_set = Ipopt._VectorNonlinearOracle(; - dimension = nZ̃, - l = g_min, - u = g_max, - eval_f = gfunc_set!, - jacobian_structure = ∇g_structure, - eval_jacobian = ∇gfunc_set! - ) - @constraint(optim, Z̃var in g_set) - - - - function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - return nothing - end - Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! 
at first call - ∇geq_context = ( - Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), - Cache(Û0), Cache(K0), Cache(X̂0), - Cache(gc), Cache(g) - ) - ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) - ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) - - - function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - if isdifferent(Z̃_arg, Z̃_∇geq) - Z̃_∇geq .= Z̃_arg - value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) - end - return nothing - end - function geqfunc_set!(geq_arg, Z̃_arg) - update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - geq_arg .= geq - return nothing - end - function ∇geqfunc_set!(∇geq_arg, Z̃_arg) - update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - diffmat2vec!(∇geq_arg, ∇geq) - return nothing - end - - geq_min = zeros(JNT, mpc.con.neq) - geq_max = zeros(JNT, mpc.con.neq) - - ∇geq_structure = init_diffstructure(∇geq) - - #= - # Langragian of the optimization problem: - function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) - update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) - J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ) - L = J + dot(μ, geq) - return L - end - =# - - - geq_set = Ipopt._VectorNonlinearOracle(; - dimension = nZ̃, - l = geq_min, - u = geq_max, - eval_f = geqfunc_set!, - jacobian_structure = ∇geq_structure, - eval_jacobian = ∇geqfunc_set! - ) - @constraint(optim, Z̃var in geq_set) - return nothing -end - """ get_optim_functions( mpc::NonLinMPC, optim::JuMP.GenericModel - ) -> Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! + ) -> J_func, ∇J_func!, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs! Return the functions for the nonlinear optimization of `mpc` [`NonLinMPC`](@ref) controller. -Return the nonlinear objective `Jfunc` function, and `∇Jfunc!`, to compute its gradient. -Also return vectors with the nonlinear inequality constraint functions `gfuncs`, and -`∇gfuncs!`, for the associated gradients. Lastly, also return vectors with the nonlinear -equality constraint functions `geqfuncs` and gradients `∇geqfuncs!`. +Return the nonlinear objective `J_func` function, and `∇J_func!`, to compute its gradient. +Also return vectors with the nonlinear inequality constraint functions `g_funcs`, and +`∇g_funcs!`, for the associated gradients. Lastly, also return vectors with the nonlinear +equality constraint functions `geq_funcs` and gradients `∇geq_funcs!`. This method is really intricate and I'm not proud of it. That's because of 3 elements: @@ -771,11 +627,11 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT J[], _ = value_and_gradient!(Jfunc!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...) end end - function Jfunc(Z̃arg::Vararg{T, N}) where {N, T<:Real} + function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real} update_objective!(J, ∇J, Z̃_∇J, Z̃arg) return J[]::T end - ∇Jfunc! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc): + ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc): function (Z̃arg) update_objective!(J, ∇J, Z̃_∇J, Z̃arg) return ∇J[begin] @@ -808,16 +664,16 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) 
end end - gfuncs = Vector{Function}(undef, ng) - for i in eachindex(gfuncs) + g_funcs = Vector{Function}(undef, ng) + for i in eachindex(g_funcs) gfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real} update_con!(g, ∇g, Z̃_∇g, Z̃arg) return g[i]::T end - gfuncs[i] = gfunc_i + g_funcs[i] = gfunc_i end - ∇gfuncs! = Vector{Function}(undef, ng) - for i in eachindex(∇gfuncs!) + ∇g_funcs! = Vector{Function}(undef, ng) + for i in eachindex(∇g_funcs!) ∇gfuncs_i! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc): function (Z̃arg::T) where T<:Real update_con!(g, ∇g, Z̃_∇g, Z̃arg) @@ -829,7 +685,7 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT return ∇g_i .= @views ∇g[i, :] end end - ∇gfuncs![i] = ∇gfuncs_i! + ∇g_funcs![i] = ∇gfuncs_i! end # --------------------- equality constraint functions --------------------------------- function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) @@ -850,16 +706,16 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) end end - geqfuncs = Vector{Function}(undef, neq) - for i in eachindex(geqfuncs) + geq_funcs = Vector{Function}(undef, neq) + for i in eachindex(geq_funcs) geqfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real} update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃arg) return geq[i]::T end - geqfuncs[i] = geqfunc_i + geq_funcs[i] = geqfunc_i end - ∇geqfuncs! = Vector{Function}(undef, neq) - for i in eachindex(∇geqfuncs!) + ∇geq_funcs! = Vector{Function}(undef, neq) + for i in eachindex(∇geq_funcs!) # only multivariate syntax, univariate is impossible since nonlinear equality # constraints imply MultipleShooting, thus input increment ΔU and state X̂0 in Z̃: ∇geqfuncs_i! = @@ -867,11 +723,158 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃arg) return ∇geq_i .= @views ∇geq[i, :] end - ∇geqfuncs![i] = ∇geqfuncs_i! + ∇geq_funcs![i] = ∇geqfuncs_i! + end + return J_func, ∇J_func!, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs! 
+end + +# TODO: move docstring of method above here an re-work it +function get_nonlinops(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real + # ----------- common cache for all functions ---------------------------------------- + model = mpc.estim.model + transcription = mpc.transcription + grad, jac = mpc.gradient, mpc.jacobian + nu, ny, nx̂, nϵ = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ + nk = get_nk(model, transcription) + Hp, Hc = mpc.Hp, mpc.Hc + ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq + nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk + nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny + strict = Val(true) + myNaN, myInf = convert(JNT, NaN), convert(JNT, Inf) + J::Vector{JNT} = zeros(JNT, 1) + ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ) + x̂0end::Vector{JNT} = zeros(JNT, nx̂) + K0::Vector{JNT} = zeros(JNT, nK) + Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe) + U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) + Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) + gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) + geq::Vector{JNT} = zeros(JNT, neq) + # -------------- inequality constraint: nonlinear oracle ----------------------------- + function g!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇g_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(geq), + ) + ## temporarily enable all the inequality constraints for sparsity detection: + # mpc.con.i_g[1:end-nc] .= true + ∇g_prep = prepare_jacobian(g!, g, jac, Z̃_∇g, ∇g_context...; strict) + # mpc.con.i_g[1:end-nc] .= false + ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) + function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇g) + Z̃_∇g .= Z̃_arg + value_and_jacobian!(g!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) + end + return nothing + end + function gfunc_oracle!(g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + g_arg .= @views g[mpc.con.i_g] + return nothing + end + ∇g_i_g = ∇g[mpc.con.i_g, :] + function ∇gfunc_oracle!(∇g_arg, Z̃_arg) + update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + ∇g_i_g .= @views ∇g[mpc.con.i_g, :] + diffmat2vec!(∇g_arg, ∇g_i_g) + return nothing + end + g_min = fill(-myInf, sum(mpc.con.i_g)) + g_max = zeros(JNT, sum(mpc.con.i_g)) + ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) + g_oracle = Ipopt._VectorNonlinearOracle(; + dimension = nZ̃, + l = g_min, + u = g_max, + eval_f = gfunc_oracle!, + jacobian_structure = ∇g_structure, + eval_jacobian = ∇gfunc_oracle! + ) + # ------------- equality constraints : nonlinear oracle ------------------------------ + function geq!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return nothing + end + Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! 
at first call + ∇geq_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(g) + ) + ∇geq_prep = prepare_jacobian(geq!, geq, jac, Z̃_∇geq, ∇geq_context...; strict) + ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq) + function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + if isdifferent(Z̃_arg, Z̃_∇geq) + Z̃_∇geq .= Z̃_arg + value_and_jacobian!(geq!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...) + end + return nothing + end + function geq_oracle!(geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + geq_arg .= geq + return nothing end - return Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! + function ∇geq_oracle!(∇geq_arg, Z̃_arg) + update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) + diffmat2vec!(∇geq_arg, ∇geq) + return nothing + end + geq_min = geq_max = zeros(JNT, neq) + ∇geq_structure = init_diffstructure(∇geq) + geq_oracle = Ipopt._VectorNonlinearOracle(; + dimension = nZ̃, + l = geq_min, + u = geq_max, + eval_f = geq_oracle!, + jacobian_structure = ∇geq_structure, + eval_jacobian = ∇geq_oracle! + ) + # ------------- objective function: splatting syntax --------------------------------- + function J!(Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq) + update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + return obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ) + end + Z̃_∇J = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call + ∇J_context = ( + Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), + Cache(Û0), Cache(K0), Cache(X̂0), + Cache(gc), Cache(g), Cache(geq), + ) + ∇J_prep = prepare_gradient(J!, grad, Z̃_∇J, ∇J_context...; strict) + ∇J = Vector{JNT}(undef, nZ̃) + function update_objective!(J, ∇J, Z̃_∇J, Z̃arg) + if isdifferent(Z̃arg, Z̃_∇J) + Z̃_∇J .= Z̃arg + J[], _ = value_and_gradient!(J!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...) + end + end + function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real} + update_objective!(J, ∇J, Z̃_∇J, Z̃arg) + return J[]::T + end + ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc): + function (Z̃arg) + update_objective!(J, ∇J, Z̃_∇J, Z̃arg) + return ∇J[] + end + else # multivariate syntax (see JuMP.@operator doc): + function (∇Jarg::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real} + update_objective!(J, ∇J, Z̃_∇J, Z̃arg) + return ∇Jarg .= ∇J + end + end + return g_oracle, geq_oracle, J_func, ∇J_func! 
end + """ update_predictions!( ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, @@ -897,6 +900,20 @@ function update_predictions!( return nothing end +function set_nonlincon_exp!( + mpc::NonLinMPC, ::TranscriptionMethod, g_oracle, geq_oracle +) + optim = mpc.optim + Z̃var = optim[:Z̃var] + nonlin_constraints = JuMP.all_constraints( + optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle + ) + map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints) + @constraint(optim, Z̃var in g_oracle) + mpc.con.neq > 0 && @constraint(optim, Z̃var in geq_oracle) + return nothing +end + @doc raw""" con_custom!(gc, mpc::NonLinMPC, Ue, Ŷe, ϵ) -> gc diff --git a/src/controller/transcription.jl b/src/controller/transcription.jl index d2b22438f..a63f41afa 100644 --- a/src/controller/transcription.jl +++ b/src/controller/transcription.jl @@ -1016,9 +1016,6 @@ function linconstraint!(mpc::PredictiveController, ::NonLinModel, ::SingleShooti return nothing end - - - @doc raw""" linconstrainteq!( mpc::PredictiveController, model::LinModel, transcription::MultipleShooting From 65693cf3f5ba166314ca94e594d2ea8da3ad8a5b Mon Sep 17 00:00:00 2001 From: franckgaga Date: Thu, 16 Oct 2025 12:53:12 -0400 Subject: [PATCH 16/17] test: remove deprecated coverage tests --- test/3_test_predictive_control.jl | 6 ------ 1 file changed, 6 deletions(-) diff --git a/test/3_test_predictive_control.jl b/test/3_test_predictive_control.jl index 9fb542eff..29840bc3e 100644 --- a/test/3_test_predictive_control.jl +++ b/test/3_test_predictive_control.jl @@ -805,12 +805,6 @@ end linmodel3 = LinModel{Float32}(0.5*ones(1,1), ones(1,1), ones(1,1), 0, 0, 3000.0) nmpc5 = NonLinMPC(nonlinmodel, Hp=1, Hc=1, Cwt=Inf, transcription=MultipleShooting()) nmpc5 = setconstraint!(nmpc5, ymin=[1]) - # execute update_predictions! branch in `gfunc_i` for coverage: - g_Y0min_end = nmpc5.optim[:g_Y0min_1].func - @test_nowarn g_Y0min_end(10.0, 9.0, 8.0, 7.0) - # execute update_predictions! branch in `geqfunc_i` for coverage: - geq_end = nmpc5.optim[:geq_2].func - @test_nowarn geq_end(5.0, 4.0, 3.0, 2.0) f! = (ẋ,x,u,_,_) -> ẋ .= -0.001x .+ u h! = (y,x,_,_) -> y .= x nonlinmodel_c = NonLinModel(f!, h!, 500, 1, 1, 1) From 82b70f90a3f158c9453e23a533fbf55e820bff53 Mon Sep 17 00:00:00 2001 From: franckgaga Date: Thu, 16 Oct 2025 17:23:05 -0400 Subject: [PATCH 17/17] changed: trunk `g` vector earlier This is more efficient with dense differentiation, in theory. I will compare the perf. in a separate PR. 
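With a dense backend the full ng-by-nZ̃ Jacobian is computed, so shrinking the output to
`g[i_g]` before differentiation should pay off; with a sparse backend the cost is already
tied to the structural nonzeros. Either way, the `init_diffstructure`/`diffmat2vec!` pair
keeps structure and values aligned. Quick self-contained check (illustrative only, the two
helpers are copied from the `general.jl` hunk of PATCH 06 and the small matrices stand in
for a Jacobian):

    using SparseArrays

    # copied from the general.jl hunk of PATCH 06 so the check is standalone:
    function init_diffstructure(A::AbstractSparseMatrix)
        I, J = findnz(A)
        return collect(zip(I, J))
    end
    init_diffstructure(A::AbstractMatrix) = Tuple.(CartesianIndices(A))[:]
    diffmat2vec!(v::AbstractVector, A::AbstractSparseMatrix) = v .= nonzeros(A)
    diffmat2vec!(v::AbstractVector, A::AbstractMatrix) = v[:] = A

    A_dense  = [1.0 0.0; 2.0 3.0]           # stand-in for a dense Jacobian
    A_sparse = sparse(A_dense)              # stand-in for a sparse Jacobian

    for A in (A_dense, A_sparse)
        structure = init_diffstructure(A)   # (row, col) pairs, as given to the oracle
        values = zeros(length(structure))
        diffmat2vec!(values, A)             # values reported in the same order
        @assert all(A[i, j] == v for ((i, j), v) in zip(structure, values))
    end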
--- src/controller/nonlinmpc.jl | 52 ++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 29 deletions(-) diff --git a/src/controller/nonlinmpc.jl b/src/controller/nonlinmpc.jl index 6057dc8bb..1082127cb 100644 --- a/src/controller/nonlinmpc.jl +++ b/src/controller/nonlinmpc.jl @@ -737,7 +737,8 @@ function get_nonlinops(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT< nu, ny, nx̂, nϵ = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ nk = get_nk(model, transcription) Hp, Hc = mpc.Hp, mpc.Hc - ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq + ng, ng_i_g = length(mpc.con.i_g), sum(mpc.con.i_g) + nc, neq = mpc.con.nc, mpc.con.neq nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny strict = Val(true) @@ -750,45 +751,40 @@ function get_nonlinops(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT< U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ) Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂) gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng) + g_i_g::Vector{JNT} = zeros(JNT, ng_i_g) geq::Vector{JNT} = zeros(JNT, neq) # -------------- inequality constraint: nonlinear oracle ----------------------------- - function g!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq) + function g_i_g!(g_i_g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq, g) update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃) + g_i_g .= @views g[mpc.con.i_g] return nothing end Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call ∇g_context = ( Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0), Cache(Û0), Cache(K0), Cache(X̂0), - Cache(gc), Cache(geq), + Cache(gc), Cache(geq), Cache(g) ) - ## temporarily enable all the inequality constraints for sparsity detection: - # mpc.con.i_g[1:end-nc] .= true - ∇g_prep = prepare_jacobian(g!, g, jac, Z̃_∇g, ∇g_context...; strict) - # mpc.con.i_g[1:end-nc] .= false - ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) - function update_con!(g, ∇g, Z̃_∇g, Z̃_arg) + ∇g_prep = prepare_jacobian(g_i_g!, g_i_g, jac, Z̃_∇g, ∇g_context...; strict) + ∇g_i_g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng) + function update_con!(g_i_g, ∇g_i_g, Z̃_∇g, Z̃_arg) if isdifferent(Z̃_arg, Z̃_∇g) Z̃_∇g .= Z̃_arg - value_and_jacobian!(g!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) + value_and_jacobian!(g_i_g!, g_i_g, ∇g_i_g, ∇g_prep, jac, Z̃_∇g, ∇g_context...) 
end return nothing end - function gfunc_oracle!(g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - g_arg .= @views g[mpc.con.i_g] - return nothing + function gfunc_oracle!(g_vec, Z̃_arg) + update_con!(g_i_g, ∇g_i_g, Z̃_∇g, Z̃_arg) + return g_vec .= g_i_g end - ∇g_i_g = ∇g[mpc.con.i_g, :] - function ∇gfunc_oracle!(∇g_arg, Z̃_arg) - update_con!(g, ∇g, Z̃_∇g, Z̃_arg) - ∇g_i_g .= @views ∇g[mpc.con.i_g, :] - diffmat2vec!(∇g_arg, ∇g_i_g) - return nothing + function ∇gfunc_oracle!(∇g_vec, Z̃_arg) + update_con!(g_i_g, ∇g_i_g, Z̃_∇g, Z̃_arg) + return diffmat2vec!(∇g_vec, ∇g_i_g) end - g_min = fill(-myInf, sum(mpc.con.i_g)) - g_max = zeros(JNT, sum(mpc.con.i_g)) - ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :]) + g_min = fill(-myInf, ng_i_g) + g_max = zeros(JNT, ng_i_g) + ∇g_structure = init_diffstructure(∇g_i_g) g_oracle = Ipopt._VectorNonlinearOracle(; dimension = nZ̃, l = g_min, @@ -817,15 +813,13 @@ function get_nonlinops(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT< end return nothing end - function geq_oracle!(geq_arg, Z̃_arg) + function geq_oracle!(geq_vec, Z̃_arg) update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - geq_arg .= geq - return nothing + return geq_vec .= geq end - function ∇geq_oracle!(∇geq_arg, Z̃_arg) + function ∇geq_oracle!(∇geq_vec, Z̃_arg) update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg) - diffmat2vec!(∇geq_arg, ∇geq) - return nothing + return diffmat2vec!(∇geq_vec, ∇geq) end geq_min = geq_max = zeros(JNT, neq) ∇geq_structure = init_diffstructure(∇geq)
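All the oracle callbacks above (`gfunc_oracle!`/`∇gfunc_oracle!`, `geq_oracle!`/`∇geq_oracle!`,
and the `J_func`/`∇J_func!` operator) share the same caching idiom: seed the input cache with
NaN so the solver's first iterate never matches it, compare with `isdifferent` (based on `!==`),
and obtain value and derivative from a single `value_and_jacobian!` (or `value_and_gradient!`
for the objective). Stripped of the controller specifics, the pattern looks like the sketch
below, with a made-up `f!` and assuming DifferentiationInterface.jl with ForwardDiff.jl loaded
(the real code uses `mpc.jacobian`, possibly an `AutoSparse` backend, and `Cache` contexts):

    using DifferentiationInterface
    import ForwardDiff                      # loads the AutoForwardDiff backend extension

    f!(y, x) = (y[1] = x[1]^2 + x[2]; y[2] = sin(x[2]); nothing)   # made-up constraint function

    backend = AutoForwardDiff()
    x_cache = fill(NaN, 2)                  # NaN seed: the first real iterate always differs
    y_cache = zeros(2)
    prep    = prepare_jacobian(f!, y_cache, backend, x_cache)
    J_cache = zeros(2, 2)

    isdifferent(x, y) = any(xi !== yi for (xi, yi) in zip(x, y))

    function update!(x_arg)                 # one evaluation shared by both callbacks
        if isdifferent(x_arg, x_cache)
            x_cache .= x_arg
            value_and_jacobian!(f!, y_cache, J_cache, prep, backend, x_cache)
        end
        return nothing
    end

    eval_f!(ret, x)        = (update!(x); ret .= y_cache; nothing)       # oracle value callback
    eval_jacobian!(ret, x) = (update!(x); ret .= vec(J_cache); nothing)  # dense values, column-major,
                                            # matching Tuple.(CartesianIndices(J_cache))[:]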