@@ -514,10 +514,27 @@ function init_optimization!(mpc::NonLinMPC, model::SimModel, optim)
 end
 
 """
-    get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel) -> Jfunc, ∇Jfunc!, gfuncs, geqfuncs
+    get_optim_functions(
+        mpc::NonLinMPC, ::JuMP.GenericModel
+    ) -> Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!
 
-Get the objective `Jfunc` function and `∇Jfunc!` to compute its gradient, and constraint
-`gfuncs` and `geqfuncs` function vectors for [`NonLinMPC`](@ref).
+Return the functions for the nonlinear optimization of the `mpc` [`NonLinMPC`](@ref) controller.
+
+Return the nonlinear objective `Jfunc` function, and `∇Jfunc!`, to compute its gradient.
+Also return vectors with the nonlinear inequality constraint functions `gfuncs`, and
+`∇gfuncs!`, for the associated gradients. Lastly, also return vectors with the nonlinear
+equality constraint functions `geqfuncs` and gradients `∇geqfuncs!`.
+
+This method is really intricate and I'm not proud of it. That's because of 3 elements:
+
+- These functions are used inside the nonlinear optimization, so they must be type-stable
+  and as efficient as possible.
+- The `JuMP` NLP syntax forces splatting for the decision variable, which implies the use
+  of `Vararg{T,N}` (see the [performance tip](https://docs.julialang.org/en/v1/manual/performance-tips/#Be-aware-of-when-Julia-avoids-specializing))
+  and memoization to avoid redundant computations. This is already complex, but it's even
+  worse knowing that the AD tools used for the gradients do not support splatting.
+- The signature of the gradient and hessian functions is not the same for univariate
+  (`nZ̃ == 1`) and multivariate (`nZ̃ > 1`) operators in `JuMP`. Both must be defined.
 
 Inspired from: [User-defined operators with vector outputs](https://jump.dev/JuMP.jl/stable/tutorials/nonlinear/tips_and_tricks/#User-defined-operators-with-vector-outputs)
 """
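
The splatting requirement and the two operator signatures mentioned in the docstring can be illustrated with a plain `JuMP` model. A minimal sketch, not code from this package: the Rosenbrock-style `f`, the in-place gradient `∇f!`, the operator name `op_f`, and the Ipopt solver are all illustrative choices.

```julia
using JuMP, Ipopt

# Splatted and typed with Vararg to force specialization on the argument count:
f(x::Vararg{T,N}) where {N,T<:Real} = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2

# Multivariate gradient syntax (more than one variable): fill `g` in place.
function ∇f!(g::AbstractVector{T}, x::Vararg{T,N}) where {N,T<:Real}
    g[1] = -2 * (1 - x[1]) - 400 * x[1] * (x[2] - x[1]^2)
    g[2] = 200 * (x[2] - x[1]^2)
    return nothing
end

model = Model(Ipopt.Optimizer)
@variable(model, x[1:2])
@operator(model, op_f, 2, f, ∇f!)         # a univariate operator (dimension 1) would
@objective(model, Min, op_f(x[1], x[2]))  # instead expect ∇f(x::Real) returning a scalar
optimize!(model)
```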
@@ -577,7 +594,6 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
     geq_vec = Vector{JNT}(undef, neq)
     ∇geq    = Matrix{JNT}(undef, neq, nZ̃)    # Jacobian of geq
     # --------------------- objective function --------------------------------------------
-    # force specialization with Vararg, see https://docs.julialang.org/en/v1/manual/performance-tips/#Be-aware-of-when-Julia-avoids-specializing
     function Jfunc(Z̃arg::Vararg{T, N}) where {N, T<:Real}
         update_simulations!(Z̃arg, get_tmp(Z̃_cache, T))
         ΔŨ = get_tmp(ΔŨ_cache, T)
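
For context, `update_simulations!` above is the memoization layer: `Jfunc` and every `gfuncs[i]`/`geqfuncs[i]` closure receive the same splatted `Z̃arg`, so the expensive simulation runs once per new decision vector and each closure just reads the shared caches. A stripped-down sketch of that pattern, restricted to `Float64` and using a hypothetical `simulate!(g, Z̃)` (the real code also handles AD dual numbers through the `get_tmp` caches):

```julia
# Memoized objective and constraint closures sharing one simulation call.
function make_memoized(nZ̃::Int, ng::Int, simulate!)
    Z̃_last = fill(NaN, nZ̃)        # NaN forces a recomputation on the first call
    g = zeros(ng)                 # constraint values shared by all closures
    J = Ref(NaN)                  # objective value
    function update!(Z̃arg)
        if any(Z̃_last .!= Z̃arg)   # skip the expensive simulation if Z̃ is unchanged
            Z̃_last .= Z̃arg
            J[] = simulate!(g, Z̃_last)
        end
        return nothing
    end
    # splatted and typed arguments, as required for the JuMP operators:
    Jfunc(Z̃arg::Vararg{Float64,N}) where {N} = (update!(Z̃arg); J[])
    gfuncs = Vector{Function}(undef, ng)
    for i in eachindex(gfuncs)
        gfuncs[i] = function (Z̃arg::Vararg{Float64,N}) where {N}
            update!(Z̃arg)
            return g[i]
        end
    end
    return Jfunc, gfuncs
end
```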
@@ -609,7 +625,7 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
     # --------------------- inequality constraint functions -------------------------------
     gfuncs = Vector{Function}(undef, ng)
     for i in eachindex(gfuncs)
-        func_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real} # see comment above
+        func_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
             update_simulations!(Z̃arg, get_tmp(Z̃_cache, T))
             g = get_tmp(g_cache, T)
             return g[i]::T
@@ -642,7 +658,7 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
     # --------------------- equality constraint functions ---------------------------------
     geqfuncs = Vector{Function}(undef, neq)
     for i in eachindex(geqfuncs)
-        func_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real} # see comment above
+        func_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
             update_simulations!(Z̃arg, get_tmp(Z̃_cache, T))
             geq = get_tmp(geq_cache, T)
             return geq[i]::T
@@ -657,20 +673,15 @@ function get_optim_functions(mpc::NonLinMPC, ::JuMP.GenericModel{JNT}) where JNT
     ∇geq_buffer = JacobianBuffer(geqfunc_vec!, geq_vec, Z̃arg_vec)
     ∇geqfuncs! = Vector{Function}(undef, neq)
     for i in eachindex(∇geqfuncs!)
-        ∇geqfuncs![i] = if nZ̃ == 1
-            function (Z̃arg::T) where T<:Real
-                Z̃arg_vec .= Z̃arg
-                jacobian!(∇geq, ∇geq_buffer, geq_vec, Z̃arg_vec)
-                return ∇geq[i, begin]   # univariate syntax, see JuMP.@operator doc
-            end
-        else
+        # only multivariate syntax, univariate is impossible since nonlinear equality
+        # constraints imply MultipleShooting thus input AND state in Z̃:
+        ∇geqfuncs![i] =
             function (∇geq_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
                 Z̃arg_vec .= Z̃arg
                 jacobian!(∇geq, ∇geq_buffer, geq_vec, Z̃arg_vec)
                 ∇geq_i .= @views ∇geq[i, :]
-                return ∇geq_i           # multivariate syntax, see JuMP.@operator doc
+                return ∇geq_i
             end
-        end
     end
     return Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!
 end
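
Downstream, each `geqfuncs[i]`/`∇geqfuncs![i]` pair is registered as an `nZ̃`-ary operator, which is where the multivariate-only gradient signature pays off. A hedged sketch of such a registration with the JuMP nonlinear API; `optim` and `Z̃var` are illustrative names, not the package's actual call site:

```julia
# Hypothetical registration loop for the equality constraints:
for i in eachindex(geqfuncs)
    name = Symbol("geq_$i")
    # nZ̃ > 1 whenever equality constraints exist, so JuMP calls the in-place
    # multivariate gradient ∇geqfuncs![i](∇geq_i, Z̃arg...):
    op_i = JuMP.add_nonlinear_operator(optim, nZ̃, geqfuncs[i], ∇geqfuncs![i]; name)
    @constraint(optim, op_i(Z̃var...) == 0)
end
```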