
Init the nonlinear optimization for [`NonLinMPC`](@ref) controllers.
"""
-function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel)
+function init_optimization!(
+    mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel{JNT}
+) where JNT<:Real
    # --- variables and linear constraints ---
    con, transcription = mpc.con, mpc.transcription
    nZ̃ = length(mpc.Z̃)
@@ -538,27 +540,37 @@ function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.Generic
            JuMP.set_attribute(optim, "nlp_scaling_max_gradient", 10.0/C)
        end
    end
-    Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! = get_optim_functions(
-        mpc, optim
-    )
-    @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!)
-    @objective(optim, Min, J(Z̃var...))
-    init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!)
-    set_nonlincon!(mpc, model, transcription, optim)
+    if JuMP.solver_name(optim) ≠ "Ipopt"
+        # everything with the splatting syntax:
+        J_func, ∇J_func!, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs! = get_optim_functions(
+            mpc, optim
+        )
+    else
+        # constraints with vector nonlinear oracle, objective function with splatting:
+        g_oracle, geq_oracle, J_func, ∇J_func! = get_nonlinops(mpc, optim)
+    end
+    @operator(optim, J_op, nZ̃, J_func, ∇J_func!)
+    @objective(optim, Min, J_op(Z̃var...))
+    if JuMP.solver_name(optim) ≠ "Ipopt"
+        init_nonlincon!(mpc, model, transcription, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs!)
+        set_nonlincon!(mpc, model, transcription, optim)
+    else
+        set_nonlincon_exp!(mpc, transcription, g_oracle, geq_oracle)
+    end
    return nothing
end

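# A minimal sketch of how the two branches above are reached from the user side. The exact
# `NonLinMPC` keyword arguments and the MadNLP solver are assumptions for illustration, not
# taken from this diff:
#
#   using ModelPredictiveControl, JuMP, Ipopt, MadNLP
#   # `model` is any SimModel instance (e.g. a NonLinModel):
#   mpc_ipopt = NonLinMPC(model; Hp=10)              # default Ipopt optimizer:
#                                                    # constraints via vector nonlinear oracles
#   mpc_other = NonLinMPC(model; Hp=10, optim=JuMP.Model(MadNLP.Optimizer))
#                                                    # any other solver: splatting operators
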
551563"""
552564 get_optim_functions(
553565 mpc::NonLinMPC, optim::JuMP.GenericModel
554- ) -> Jfunc , ∇Jfunc !, gfuncs , ∇gfuncs !, geqfuncs , ∇geqfuncs !
566+ ) -> J_func , ∇J_func !, g_funcs , ∇g_funcs !, geq_funcs , ∇geq_funcs !
555567
556568Return the functions for the nonlinear optimization of `mpc` [`NonLinMPC`](@ref) controller.
557569
558- Return the nonlinear objective `Jfunc ` function, and `∇Jfunc !`, to compute its gradient.
559- Also return vectors with the nonlinear inequality constraint functions `gfuncs `, and
560- `∇gfuncs !`, for the associated gradients. Lastly, also return vectors with the nonlinear
561- equality constraint functions `geqfuncs ` and gradients `∇geqfuncs !`.
570+ Return the nonlinear objective `J_func ` function, and `∇J_func !`, to compute its gradient.
571+ Also return vectors with the nonlinear inequality constraint functions `g_funcs `, and
572+ `∇g_funcs !`, for the associated gradients. Lastly, also return vectors with the nonlinear
573+ equality constraint functions `geq_funcs ` and gradients `∇geq_funcs !`.
562574
563575This method is really intricate and I'm not proud of it. That's because of 3 elements:
564576
@@ -615,11 +627,11 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
            J[], _ = value_and_gradient!(Jfunc!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...)
        end
    end
-    function Jfunc(Z̃arg::Vararg{T, N}) where {N, T<:Real}
+    function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real}
        update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
        return J[]::T
    end
-    ∇Jfunc! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
+    ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
        function (Z̃arg)
            update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
            return ∇J[begin]
@@ -652,16 +664,16 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
            value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
        end
    end
-    gfuncs = Vector{Function}(undef, ng)
-    for i in eachindex(gfuncs)
+    g_funcs = Vector{Function}(undef, ng)
+    for i in eachindex(g_funcs)
        gfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
            update_con!(g, ∇g, Z̃_∇g, Z̃arg)
            return g[i]::T
        end
-        gfuncs[i] = gfunc_i
+        g_funcs[i] = gfunc_i
    end
-    ∇gfuncs! = Vector{Function}(undef, ng)
-    for i in eachindex(∇gfuncs!)
+    ∇g_funcs! = Vector{Function}(undef, ng)
+    for i in eachindex(∇g_funcs!)
        ∇gfuncs_i! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
            function (Z̃arg::T) where T<:Real
                update_con!(g, ∇g, Z̃_∇g, Z̃arg)
@@ -673,7 +685,7 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
                return ∇g_i .= @views ∇g[i, :]
            end
        end
-        ∇gfuncs![i] = ∇gfuncs_i!
+        ∇g_funcs![i] = ∇gfuncs_i!
    end
    # --------------------- equality constraint functions ---------------------------------
    function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g)
@@ -694,28 +706,175 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
            value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...)
        end
    end
-    geqfuncs = Vector{Function}(undef, neq)
-    for i in eachindex(geqfuncs)
+    geq_funcs = Vector{Function}(undef, neq)
+    for i in eachindex(geq_funcs)
        geqfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
            update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃arg)
            return geq[i]::T
        end
-        geqfuncs[i] = geqfunc_i
+        geq_funcs[i] = geqfunc_i
    end
-    ∇geqfuncs! = Vector{Function}(undef, neq)
-    for i in eachindex(∇geqfuncs!)
+    ∇geq_funcs! = Vector{Function}(undef, neq)
+    for i in eachindex(∇geq_funcs!)
        # only multivariate syntax, univariate is impossible since nonlinear equality
        # constraints imply MultipleShooting, thus input increment ΔU and state X̂0 in Z̃:
        ∇geqfuncs_i! =
            function (∇geq_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
                update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃arg)
                return ∇geq_i .= @views ∇geq[i, :]
            end
-        ∇geqfuncs![i] = ∇geqfuncs_i!
+        ∇geq_funcs![i] = ∇geqfuncs_i!
    end
-    return Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!
+    return J_func, ∇J_func!, g_funcs, ∇g_funcs!, geq_funcs, ∇geq_funcs!
end

+# TODO: move the docstring of the method above here and re-work it
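+# (Returns the inequality constraint oracle `g_oracle`, the equality constraint oracle
+# `geq_oracle`, and the objective `J_func` with its gradient `∇J_func!` in splatting syntax,
+# for the Ipopt-specific branch of `init_optimization!` above.)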
+function get_nonlinops(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real
+    # ----------- common cache for all functions -----------------------------------------
+    model = mpc.estim.model
+    transcription = mpc.transcription
+    grad, jac = mpc.gradient, mpc.jacobian
+    nu, ny, nx̂, nϵ = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ
+    nk = get_nk(model, transcription)
+    Hp, Hc = mpc.Hp, mpc.Hc
+    ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
+    nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk
+    nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny
+    strict = Val(true)
+    myNaN, myInf = convert(JNT, NaN), convert(JNT, Inf)
+    J::Vector{JNT} = zeros(JNT, 1)
+    ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ)
+    x̂0end::Vector{JNT} = zeros(JNT, nx̂)
+    K0::Vector{JNT} = zeros(JNT, nK)
+    Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe)
+    U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ)
+    Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂)
+    gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng)
+    geq::Vector{JNT} = zeros(JNT, neq)
+    # -------------- inequality constraint: nonlinear oracle -----------------------------
+    function g!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇g_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(geq),
+    )
+    # # temporarily enable all the inequality constraints for sparsity detection:
+    # mpc.con.i_g[1:end-nc] .= true
+    ∇g_prep = prepare_jacobian(g!, g, jac, Z̃_∇g, ∇g_context...; strict)
+    # mpc.con.i_g[1:end-nc] .= false
+    ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng)
+    function update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇g)
+            Z̃_∇g .= Z̃_arg
+            value_and_jacobian!(g!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
+        end
+        return nothing
+    end
+    function gfunc_oracle!(g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        g_arg .= @views g[mpc.con.i_g]
+        return nothing
+    end
+    ∇g_i_g = ∇g[mpc.con.i_g, :]
+    function ∇gfunc_oracle!(∇g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        ∇g_i_g .= @views ∇g[mpc.con.i_g, :]
+        diffmat2vec!(∇g_arg, ∇g_i_g)
+        return nothing
+    end
+    g_min = fill(-myInf, sum(mpc.con.i_g))
+    g_max = zeros(JNT, sum(mpc.con.i_g))
+    ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :])
+    g_oracle = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = g_min,
+        u = g_max,
+        eval_f = gfunc_oracle!,
+        jacobian_structure = ∇g_structure,
+        eval_jacobian = ∇gfunc_oracle!
+    )
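+    # `Ipopt._VectorNonlinearOracle` is Ipopt.jl's vector oracle interface (non-public, hence
+    # the leading underscore): the active inequality constraints are evaluated in a single
+    # callback, bounded by -∞ ≤ g ≤ 0, and their Jacobian is returned as the sparse entries
+    # listed in `∇g_structure`.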
+    # ------------- equality constraints: nonlinear oracle -------------------------------
+    function geq!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇geq_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(g)
+    )
+    ∇geq_prep = prepare_jacobian(geq!, geq, jac, Z̃_∇geq, ∇geq_context...; strict)
+    ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq)
+    function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇geq)
+            Z̃_∇geq .= Z̃_arg
+            value_and_jacobian!(geq!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...)
+        end
+        return nothing
+    end
+    function geq_oracle!(geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        geq_arg .= geq
+        return nothing
+    end
+    function ∇geq_oracle!(∇geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        diffmat2vec!(∇geq_arg, ∇geq)
+        return nothing
+    end
+    geq_min = geq_max = zeros(JNT, neq)
+    ∇geq_structure = init_diffstructure(∇geq)
+    geq_oracle = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = geq_min,
+        u = geq_max,
+        eval_f = geq_oracle!,
+        jacobian_structure = ∇geq_structure,
+        eval_jacobian = ∇geq_oracle!
+    )
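+    # same oracle interface for the equality constraints: identical lower and upper bounds
+    # (0 ≤ geq ≤ 0) turn the vector constraint into equalities.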
+    # ------------- objective function: splatting syntax ---------------------------------
+    function J!(Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ)
+    end
+    Z̃_∇J = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇J_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(g), Cache(geq),
+    )
+    ∇J_prep = prepare_gradient(J!, grad, Z̃_∇J, ∇J_context...; strict)
+    ∇J = Vector{JNT}(undef, nZ̃)
+    function update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
+        if isdifferent(Z̃arg, Z̃_∇J)
+            Z̃_∇J .= Z̃arg
+            J[], _ = value_and_gradient!(J!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...)
+        end
+    end
+    function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real}
+        update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
+        return J[]::T
+    end
+    ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
+        function (Z̃arg)
+            update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
+            return ∇J[]
+        end
+    else # multivariate syntax (see JuMP.@operator doc):
+        function (∇Jarg::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
+            update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
+            return ∇Jarg .= ∇J
+        end
+    end
+    return g_oracle, geq_oracle, J_func, ∇J_func!
+end
+
+
719878"""
720879 update_predictions!(
721880 ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq,
@@ -741,6 +900,20 @@ function update_predictions!(
    return nothing
end

+function set_nonlincon_exp!(
+    mpc::NonLinMPC, ::TranscriptionMethod, g_oracle, geq_oracle
+)
+    optim = mpc.optim
+    Z̃var = optim[:Z̃var]
+    nonlin_constraints = JuMP.all_constraints(
+        optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle
+    )
+    map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints)
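+    # with the oracle constraints of any previous call deleted, attach the new oracles on Z̃var: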
+    @constraint(optim, Z̃var in g_oracle)
+    mpc.con.neq > 0 && @constraint(optim, Z̃var in geq_oracle)
+    return nothing
+end
+
@doc raw"""
    con_custom!(gc, mpc::NonLinMPC, Ue, Ŷe, ϵ) -> gc
