-const DEFAULT_NONLINMPC_OPTIMIZER = optimizer_with_attributes(Ipopt.Optimizer, "sb" => "yes")
 const DEFAULT_NONLINMPC_TRANSCRIPTION = SingleShooting()
+const DEFAULT_NONLINMPC_OPTIMIZER = optimizer_with_attributes(Ipopt.Optimizer, "sb" => "yes")
+const DEFAULT_NONLINMPC_GRADIENT = AutoForwardDiff()
 
 struct NonLinMPC{
     NT<:Real,
@@ -52,7 +53,7 @@ struct NonLinMPC{
     function NonLinMPC{NT}(
         estim::SE,
         Hp, Hc, M_Hp, N_Hc, L_Hp, Cwt, Ewt, JE::JEfunc, gc!::GCfunc, nc, p::PT,
-        transcription::TM, optim::JM
+        transcription::TM, optim::JM, gradient, jacobian
     ) where {
         NT<:Real,
         SE<:StateEstimator,
@@ -109,7 +110,7 @@ struct NonLinMPC{
             Uop, Yop, Dop,
             buffer
         )
-        init_optimization!(mpc, model, optim)
+        init_optimization!(mpc, model, optim, gradient, jacobian)
        return mpc
     end
 end
@@ -189,6 +190,10 @@ This controller allocates memory at each time step for the optimization.
 - `optim=JuMP.Model(Ipopt.Optimizer)` : nonlinear optimizer used in the predictive
   controller, provided as a [`JuMP.Model`](https://jump.dev/JuMP.jl/stable/api/JuMP/#JuMP.Model)
   (defaults to the [`Ipopt`](https://github.com/jump-dev/Ipopt.jl) optimizer).
+- `gradient=AutoForwardDiff()` : an `AbstractADType` backend for the gradient of the objective
+  function, see [`DifferentiationInterface`](https://juliadiff.org/DifferentiationInterface.jl/DifferentiationInterface/stable/explanation/backends/#List).
+- `jacobian=default_jacobian(transcription)` : an `AbstractADType` backend for the Jacobian
+  of the nonlinear constraints, see `gradient` above for the options (default in Extended Help).
 - additional keyword arguments are passed to the [`UnscentedKalmanFilter`](@ref) constructor
   (or [`SteadyKalmanFilter`](@ref), for [`LinModel`](@ref)).
 
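A quick sketch of how the two new keywords are meant to be used (not part of the diff; the toy model, horizons, and the FiniteDiff backend are illustrative assumptions):

```julia
using ModelPredictiveControl, ADTypes
import FiniteDiff    # backend package behind AutoFiniteDiff()

# Hypothetical single-state discrete-time model, for illustration only:
f(x, u, _, _) = 0.5 .* x .+ u    # state update
h(x, _, _) = x                   # measured output
model = NonLinModel(f, h, 1.0, 1, 1, 1; solver=nothing)

# Swap the objective gradient backend; the constraint Jacobian backend
# keeps its default, inferred from the transcription method:
mpc = NonLinMPC(model; Hp=10, Hc=2, gradient=AutoFiniteDiff())
```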
@@ -265,13 +270,15 @@ function NonLinMPC(
     p = model.p,
     transcription::TranscriptionMethod = DEFAULT_NONLINMPC_TRANSCRIPTION,
     optim::JuMP.GenericModel = JuMP.Model(DEFAULT_NONLINMPC_OPTIMIZER, add_bridges=false),
+    gradient::AbstractADType = DEFAULT_NONLINMPC_GRADIENT,
+    jacobian::AbstractADType = default_jacobian(transcription),
     kwargs...
 )
     estim = UnscentedKalmanFilter(model; kwargs...)
     return NonLinMPC(
         estim;
         Hp, Hc, Mwt, Nwt, Lwt, Cwt, Ewt, JE, gc, nc, p, M_Hp, N_Hc, L_Hp,
-        transcription, optim
+        transcription, optim, gradient, jacobian
     )
 end
 
@@ -294,13 +301,15 @@ function NonLinMPC(
     p = model.p,
     transcription::TranscriptionMethod = DEFAULT_NONLINMPC_TRANSCRIPTION,
     optim::JuMP.GenericModel = JuMP.Model(DEFAULT_NONLINMPC_OPTIMIZER, add_bridges=false),
+    gradient::AbstractADType = DEFAULT_NONLINMPC_GRADIENT,
+    jacobian::AbstractADType = default_jacobian(transcription),
     kwargs...
 )
     estim = SteadyKalmanFilter(model; kwargs...)
     return NonLinMPC(
         estim;
         Hp, Hc, Mwt, Nwt, Lwt, Cwt, Ewt, JE, gc, nc, p, M_Hp, N_Hc, L_Hp,
-        transcription, optim
+        transcription, optim, gradient, jacobian
     )
 end
 
@@ -347,6 +356,8 @@ function NonLinMPC(
     p = estim.model.p,
     transcription::TranscriptionMethod = DEFAULT_NONLINMPC_TRANSCRIPTION,
     optim::JuMP.GenericModel = JuMP.Model(DEFAULT_NONLINMPC_OPTIMIZER, add_bridges=false),
+    gradient::AbstractADType = DEFAULT_NONLINMPC_GRADIENT,
+    jacobian::AbstractADType = default_jacobian(transcription),
 ) where {
     NT<:Real,
     SE<:StateEstimator{NT}
@@ -360,10 +371,13 @@ function NonLinMPC(
     gc! = get_mutating_gc(NT, gc)
     return NonLinMPC{NT}(
         estim, Hp, Hc, M_Hp, N_Hc, L_Hp, Cwt, Ewt, JE, gc!, nc, p,
-        transcription, optim
+        transcription, optim, gradient, jacobian
     )
 end
 
+default_jacobian(::SingleShooting) = AutoForwardDiff()
+default_jacobian(::TranscriptionMethod) = AutoForwardDiff()
+
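Both fallbacks above are dense ForwardDiff. As an aside (an assumption, not something this diff adds), the `jacobian` keyword also accepts a sparse backend, which can pay off under `MultipleShooting`, where the defect constraints make the Jacobian block-sparse; `SparseConnectivityTracer` and `SparseMatrixColorings` are assumed installed, and `model` is the toy model from the earlier sketch:

```julia
using ADTypes, SparseConnectivityTracer, SparseMatrixColorings

# Detect the sparsity pattern once, then color the columns so the dense
# backend only needs one sweep per color instead of one per variable:
sparse_jac = AutoSparse(
    AutoForwardDiff();
    sparsity_detector  = TracerSparsityDetector(),
    coloring_algorithm = GreedyColoringAlgorithm(),
)
mpc = NonLinMPC(model; Hp=10, Hc=2, transcription=MultipleShooting(), jacobian=sparse_jac)
```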
 """
     validate_JE(NT, JE) -> nothing
 
@@ -477,11 +491,23 @@ function addinfo!(info, mpc::NonLinMPC{NT}) where NT<:Real
 end
 
 """
-    init_optimization!(mpc::NonLinMPC, model::SimModel, optim)
+    init_optimization!(
+        mpc::NonLinMPC,
+        model::SimModel,
+        optim::JuMP.GenericModel,
+        gradient::AbstractADType,
+        jacobian::AbstractADType
+    ) -> nothing
 
 Initialize the nonlinear optimization for [`NonLinMPC`](@ref) controllers.
 """
-function init_optimization!(mpc::NonLinMPC, model::SimModel, optim)
+function init_optimization!(
+    mpc::NonLinMPC,
+    model::SimModel,
+    optim::JuMP.GenericModel,
+    gradient::AbstractADType,
+    jacobian::AbstractADType
+)
     # --- variables and linear constraints ---
     con, transcription = mpc.con, mpc.transcription
     nZ̃ = length(mpc.Z̃)
@@ -505,7 +531,9 @@ function init_optimization!(mpc::NonLinMPC, model::SimModel, optim)
             JuMP.set_attribute(optim, "nlp_scaling_max_gradient", 10.0/C)
         end
     end
-    Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! = get_optim_functions(mpc, optim)
+    Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs! = get_optim_functions(
+        mpc, optim, gradient, jacobian
+    )
     @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!)
     @objective(optim, Min, J(Z̃var...))
     init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!)
 
 """
     get_optim_functions(
-        mpc::NonLinMPC, optim::JuMP.GenericModel
+        mpc::NonLinMPC,
+        optim::JuMP.GenericModel,
+        grad_backend::AbstractADType,
+        jac_backend::AbstractADType
     ) -> Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!
 
 Return the functions for the nonlinear optimization of the `mpc` [`NonLinMPC`](@ref) controller.
@@ -538,7 +569,12 @@ This method is really intricate and I'm not proud of it. That's because of 3 ele
 
 Inspired by: [User-defined operators with vector outputs](https://jump.dev/JuMP.jl/stable/tutorials/nonlinear/tips_and_tricks/#User-defined-operators-with-vector-outputs)
 """
-function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT<:Real
+function get_optim_functions(
+    mpc::NonLinMPC,
+    optim::JuMP.GenericModel{JNT},
+    grad_backend::AbstractADType,
+    jac_backend::AbstractADType
+) where JNT<:Real
     model, transcription = mpc.estim.model, mpc.transcription
     nu, ny, nx̂, nϵ, Hp, Hc = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, mpc.Hp, mpc.Hc
     ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
@@ -602,19 +638,19 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
         U0, Ŷ0 = get_tmp(U0_cache, T), get_tmp(Ŷ0_cache, T)
         return obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ)::T
     end
-    Z̃_∇J = fill(myNaN, nZ̃)
-    ∇J = Vector{JNT}(undef, nZ̃)    # gradient of objective J
-    ∇J_buffer = GradientBuffer(Jfunc_vec, Z̃_∇J)
+    Z̃_∇J = fill(myNaN, nZ̃)
+    ∇J = Vector{JNT}(undef, nZ̃)    # gradient of objective J
+    ∇J_prep = prepare_gradient(Jfunc_vec, grad_backend, Z̃_∇J)
     ∇Jfunc! = if nZ̃ == 1
         function (Z̃arg::T) where T<:Real
             Z̃_∇J .= Z̃arg
-            gradient!(∇J, ∇J_buffer, Z̃_∇J)
+            gradient!(Jfunc_vec, ∇J, ∇J_prep, grad_backend, Z̃_∇J)
             return ∇J[begin]    # univariate syntax, see JuMP.@operator doc
         end
     else
         function (∇J::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
             Z̃_∇J .= Z̃arg
-            gradient!(∇J, ∇J_buffer, Z̃_∇J)
+            gradient!(Jfunc_vec, ∇J, ∇J_prep, grad_backend, Z̃_∇J)
             return ∇J    # multivariate syntax, see JuMP.@operator doc
         end
     end
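The `prepare_gradient`/`gradient!` pair follows DifferentiationInterface.jl's preparation mechanism: backend-specific setup (tapes, configs, caches) is built once outside the hot loop, then reused at every call. A self-contained sketch of the same pattern, with a toy function standing in for `Jfunc_vec`:

```julia
using DifferentiationInterface
import ForwardDiff    # loads the backend behind AutoForwardDiff()

f(x) = sum(abs2, x)   # stand-in for Jfunc_vec
backend = AutoForwardDiff()
x = zeros(3)
grad = similar(x)
prep = prepare_gradient(f, backend, x)    # one-time, allocating setup
gradient!(f, grad, prep, backend, x)      # hot-loop call, reuses prep
```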
@@ -633,25 +669,25 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
         g .= get_tmp(g_cache, T)
         return g
     end
-    Z̃_∇g = fill(myNaN, nZ̃)
-    g_vec = Vector{JNT}(undef, ng)
-    ∇g = Matrix{JNT}(undef, ng, nZ̃)    # Jacobian of inequality constraints g
-    ∇g_buffer = JacobianBuffer(gfunc_vec!, g_vec, Z̃_∇g)
-    ∇gfuncs! = Vector{Function}(undef, ng)
+    Z̃_∇g = fill(myNaN, nZ̃)
+    g_vec = Vector{JNT}(undef, ng)
+    ∇g = Matrix{JNT}(undef, ng, nZ̃)    # Jacobian of inequality constraints g
+    ∇g_prep = prepare_jacobian(gfunc_vec!, g_vec, jac_backend, Z̃_∇g)
+    ∇gfuncs! = Vector{Function}(undef, ng)
     for i in eachindex(∇gfuncs!)
         ∇gfuncs![i] = if nZ̃ == 1
             function (Z̃arg::T) where T<:Real
                 if isdifferent(Z̃arg, Z̃_∇g)
                     Z̃_∇g .= Z̃arg
-                    jacobian!(∇g, ∇g_buffer, g_vec, Z̃_∇g)
+                    jacobian!(gfunc_vec!, g_vec, ∇g, ∇g_prep, jac_backend, Z̃_∇g)
                 end
                 return ∇g[i, begin]    # univariate syntax, see JuMP.@operator doc
             end
         else
             function (∇g_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
                 if isdifferent(Z̃arg, Z̃_∇g)
                     Z̃_∇g .= Z̃arg
-                    jacobian!(∇g, ∇g_buffer, g_vec, Z̃_∇g)
+                    jacobian!(gfunc_vec!, g_vec, ∇g, ∇g_prep, jac_backend, Z̃_∇g)
                 end
                 return ∇g_i .= @views ∇g[i, :]    # multivariate syntax, see JuMP.@operator doc
             end
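The constraint side uses the two-argument (in-place) variant: `jacobian!` takes the mutating function together with its output buffer, so the primal value and the Jacobian are both written without per-call allocation. A minimal standalone sketch of that calling convention, with a toy `f!` standing in for `gfunc_vec!`:

```julia
using DifferentiationInterface
import ForwardDiff

f!(y, x) = (y .= x .^ 2; nothing)    # stand-in for gfunc_vec!
backend = AutoForwardDiff()
x = [1.0, 2.0]
y = zeros(2)       # output buffer, like g_vec
J = zeros(2, 2)    # Jacobian buffer, like ∇g
prep = prepare_jacobian(f!, y, backend, x)
jacobian!(f!, y, J, prep, backend, x)    # fills both y and J
```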
@@ -672,19 +708,19 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
         geq .= get_tmp(geq_cache, T)
         return geq
     end
-    Z̃_∇geq = fill(myNaN, nZ̃)    # NaN to force update at 1st call
-    geq_vec = Vector{JNT}(undef, neq)
-    ∇geq = Matrix{JNT}(undef, neq, nZ̃)    # Jacobian of equality constraints geq
-    ∇geq_buffer = JacobianBuffer(geqfunc_vec!, geq_vec, Z̃_∇geq)
-    ∇geqfuncs! = Vector{Function}(undef, neq)
+    Z̃_∇geq = fill(myNaN, nZ̃)
+    geq_vec = Vector{JNT}(undef, neq)
+    ∇geq = Matrix{JNT}(undef, neq, nZ̃)    # Jacobian of equality constraints geq
+    ∇geq_prep = prepare_jacobian(geqfunc_vec!, geq_vec, jac_backend, Z̃_∇geq)
+    ∇geqfuncs! = Vector{Function}(undef, neq)
     for i in eachindex(∇geqfuncs!)
         # only multivariate syntax, univariate is impossible since nonlinear equality
         # constraints imply MultipleShooting, thus input increment ΔU and state X̂0 in Z̃:
         ∇geqfuncs![i] =
             function (∇geq_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
                 if isdifferent(Z̃arg, Z̃_∇geq)
                     Z̃_∇geq .= Z̃arg
-                    jacobian!(∇geq, ∇geq_buffer, geq_vec, Z̃_∇geq)
+                    jacobian!(geqfunc_vec!, geq_vec, ∇geq, ∇geq_prep, jac_backend, Z̃_∇geq)
                 end
                 return ∇geq_i .= @views ∇geq[i, :]
             end
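For context, these closures feed JuMP's user-defined operator mechanism (`@operator`), where a multivariate gradient callback fills a vector in place and splatted arguments carry the decision variables. A tiny self-contained example of that form, with a toy objective and a hand-written gradient:

```julia
using JuMP, Ipopt

model = Model(Ipopt.Optimizer)
@variable(model, x[1:2])

f(x...) = (x[1] - 1)^2 + (x[2] - 2)^2
function ∇f!(g, x...)    # multivariate syntax: fill g in place
    g[1] = 2 * (x[1] - 1)
    g[2] = 2 * (x[2] - 2)
    return g
end

@operator(model, op_f, 2, f, ∇f!)
@objective(model, Min, op_f(x...))
optimize!(model)
```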