@@ -533,143 +533,155 @@ function init_optimization!(
         init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!)
         set_nonlincon!(mpc, model, transcription, optim)
     else
-        # ========= Test new experimental feature ========================================
-
-        model = mpc.estim.model
-        jac = mpc.jacobian
-        nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk
-        Hp, Hc = mpc.Hp, mpc.Hc
-        ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
-        nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk
-        nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny
-        strict = Val(true)
-        myNaN = convert(JNT, NaN)
-        myInf = convert(JNT, Inf)
-
-        ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ)
-        x̂0end::Vector{JNT} = zeros(JNT, nx̂)
-        K0::Vector{JNT} = zeros(JNT, nK)
-        Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe)
-        U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ)
-        Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂)
-        gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng)
-        geq::Vector{JNT} = zeros(JNT, neq)
-
-
-
-        # TODO: transfer all the following in set_nonlincon!, including a copy-paste
-        # of all the vectors above.
-        function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq)
-            update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
-            return nothing
-        end
-        Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
-        ∇g_context = (
-            Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
-            Cache(Û0), Cache(K0), Cache(X̂0),
-            Cache(gc), Cache(geq),
-        )
-        # temporarily enable all the inequality constraints for sparsity detection:
-        mpc.con.i_g[1:end-nc] .= true
-        ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict)
-        mpc.con.i_g[1:end-nc] .= false
-        ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng)
+        set_nonlincon_exp(mpc, optim)
+    end
+    return nothing
+end

+# TODO: cleanup this function, this is super dirty
+function set_nonlincon_exp(mpc::PredictiveController, optim::JuMP.GenericModel{JNT}) where JNT<:Real
+    # ========= Test new experimental feature ========================================

-        function update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
-            if isdifferent(Z̃_arg, Z̃_∇g)
-                Z̃_∇g .= Z̃_arg
-                value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
-            end
-            return nothing
-        end
-        function gfunc_set!(g_arg, Z̃_arg)
-            update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
-            g_arg .= @views g[mpc.con.i_g]
-            return nothing
-        end
-        function ∇gfunc_set!(∇g_arg, Z̃_arg)
-            update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
-            diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :])
-            return nothing
-        end
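+    # delete any vector nonlinear oracle constraints left from a previous call, so they are not duplicated: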
+    nonlin_constraints = JuMP.all_constraints(optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle)
+    map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints)

-        g_min = fill(-myInf, sum(mpc.con.i_g))
-        g_max = zeros(JNT, sum(mpc.con.i_g))

-        ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :])
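+    # fetch the decision variable vector already registered in the JuMP model: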
+    Z̃var = optim[:Z̃var]

-        g_set = Ipopt._VectorNonlinearOracle(;
-            dimension = nZ̃,
-            l = g_min,
-            u = g_max,
-            eval_f = gfunc_set!,
-            jacobian_structure = ∇g_structure,
-            eval_jacobian = ∇gfunc_set!
-        )
-        @constraint(optim, Z̃var in g_set)
+    model = mpc.estim.model
+    jac = mpc.jacobian
+    nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk
+    Hp, Hc = mpc.Hp, mpc.Hc
+    ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
+    nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk
+    nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny
+    strict = Val(true)
+    myNaN = convert(JNT, NaN)
+    myInf = convert(JNT, Inf)


-
-        function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g)
-            update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
-            return nothing
-        end
-        Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
-        ∇geq_context = (
-            Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
-            Cache(Û0), Cache(K0), Cache(X̂0),
-            Cache(gc), Cache(g)
-        )
-        ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict)
-        ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq)
+    ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ)
+    x̂0end::Vector{JNT} = zeros(JNT, nx̂)
+    K0::Vector{JNT} = zeros(JNT, nK)
+    Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe)
+    U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ)
+    Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂)
+    gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng)
+    geq::Vector{JNT} = zeros(JNT, neq)


-        function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
-            if isdifferent(Z̃_arg, Z̃_∇geq)
-                Z̃_∇geq .= Z̃_arg
-                value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...)
-            end
-            return nothing
-        end
-        function geqfunc_set!(geq_arg, Z̃_arg)
-            update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
-            geq_arg .= geq
-            return nothing
-        end
-        function ∇geqfunc_set!(∇geq_arg, Z̃_arg)
-            update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
-            diffmat2vec!(∇geq_arg, ∇geq)
-            return nothing
+
+    # TODO: transfer all the following in set_nonlincon!, including a copy-paste
+    # of all the vectors above.
+    function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇g_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(geq),
+    )
+    # temporarily enable all the inequality constraints for sparsity detection:
+    mpc.con.i_g[1:end-nc] .= true
+    ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict)
+    mpc.con.i_g[1:end-nc] .= false
+    ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng)
+
+
+    function update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇g)
+            Z̃_∇g .= Z̃_arg
+            value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
         end
+        return nothing
+    end
+    function gfunc_set!(g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        g_arg .= @views g[mpc.con.i_g]
+        return nothing
+    end
+    function ∇gfunc_set!(∇g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        diffmat2vec!(∇g_arg, @views ∇g[mpc.con.i_g, :])
+        return nothing
+    end

-        geq_min = zeros(JNT, mpc.con.neq)
-        geq_max = zeros(JNT, mpc.con.neq)
+    g_min = fill(-myInf, sum(mpc.con.i_g))
+    g_max = zeros(JNT, sum(mpc.con.i_g))
+
+    ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :])
+
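+    # inequality constraints -Inf ≤ g(Z̃) ≤ 0, evaluated in-place with a sparse Jacobian: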
+    g_set = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = g_min,
+        u = g_max,
+        eval_f = gfunc_set!,
+        jacobian_structure = ∇g_structure,
+        eval_jacobian = ∇gfunc_set!
+    )
+    @constraint(optim, Z̃var in g_set)

-        ∇geq_structure = init_diffstructure(∇geq)

-        #=
-        # Lagrangian of the optimization problem:
-        function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq)
-            update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
-            J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ)
-            L = J + dot(μ, geq)
-            return L
+
+    function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇geq_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(g)
+    )
+    ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict)
+    ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq)
+
+
+    function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇geq)
+            Z̃_∇geq .= Z̃_arg
+            value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...)
        end
-        =#
-
-
-        geq_set = Ipopt._VectorNonlinearOracle(;
-            dimension = nZ̃,
-            l = geq_min,
-            u = geq_max,
-            eval_f = geqfunc_set!,
-            jacobian_structure = ∇geq_structure,
-            eval_jacobian = ∇geqfunc_set!
-        )
-        @constraint(optim, Z̃var in geq_set)
+        return nothing
+    end
+    function geqfunc_set!(geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        geq_arg .= geq
+        return nothing
+    end
+    function ∇geqfunc_set!(∇geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        diffmat2vec!(∇geq_arg, ∇geq)
+        return nothing
+    end

-    end
+    geq_min = zeros(JNT, mpc.con.neq)
+    geq_max = zeros(JNT, mpc.con.neq)
+
+    ∇geq_structure = init_diffstructure(∇geq)
+
+    #=
+    # Lagrangian of the optimization problem:
+    function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ)
+        L = J + dot(μ, geq)
+        return L
+    end
+    =#
+
+
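+    # equality constraints geq(Z̃) = 0, evaluated in-place through a second oracle: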
+    geq_set = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = geq_min,
+        u = geq_max,
+        eval_f = geqfunc_set!,
+        jacobian_structure = ∇geq_structure,
+        eval_jacobian = ∇geqfunc_set!
+    )
+    @constraint(optim, Z̃var in geq_set)
     return nothing
 end
