@@ -1280,20 +1280,17 @@ function init_optimization!(
         end
     end
     # constraints with vector nonlinear oracle, objective function with splatting:
-    g_oracle, J_func, ∇J_func! = get_nonlinops(estim, optim)
-    @operator(optim, J, nZ̃, J_func, ∇J_func!)
-    @objective(optim, Min, J(Z̃var...))
+    g_oracle, J_op = get_nonlinops(estim, optim)
+    optim[:J_op] = J_op
+    @objective(optim, Min, J_op(Z̃var...))
     set_nonlincon!(estim, model, optim, g_oracle)
     return nothing
 end
 
-
 """
-    get_optim_functions(
-        estim::MovingHorizonEstimator, optim::JuMP.GenericModel,
-    ) -> Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!
+    get_nonlinops(estim::MovingHorizonEstimator, optim) -> g_oracle, J_op
 
-Return the functions for the nonlinear optimization of [`MovingHorizonEstimator`](@ref).
+Return the operators for the nonlinear optimization of [`MovingHorizonEstimator`](@ref).
 
 Return `g_oracle`, the [`VectorNonlinearOracle`](@extref MOI MathOptInterface.VectorNonlinearOracle)
 for the inequality constraints. Note that `g_oracle` only includes the non-`Inf`
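For readers unfamiliar with the pattern adopted in the hunk above: the `NonlinearOperator` returned by `get_nonlinops` is stored in the model's object dictionary and splatted over the decision variables in `@objective`. Below is a minimal, self-contained sketch of that pattern; the toy model, `f`, and `∇f!` are illustrative stand-ins, not code from this package.

```julia
using JuMP, Ipopt

toy = JuMP.Model(Ipopt.Optimizer)
@variable(toy, z[1:2])
f(z1, z2) = (z1 - 1)^2 + z2^2        # toy objective (stand-in, not the MHE cost)
function ∇f!(g, z1, z2)              # in-place gradient, multivariate syntax
    g[1] = 2*(z1 - 1)
    g[2] = 2*z2
    return g
end
# register the operator, store it in the object dictionary, splat the variables:
J_op = JuMP.add_nonlinear_operator(toy, 2, f, ∇f!, name=:J_op)
toy[:J_op] = J_op
@objective(toy, Min, J_op(z...))
optimize!(toy)
```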
@@ -1308,117 +1305,8 @@ on the splatting syntax. This method is really intricate and that's because of 2
   and memoization to avoid redundant computations. This is already complex, but it's even
   worse knowing that the automatic differentiation tools do not support splatting.
 """
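The caching and memoization described in this docstring are easiest to see in isolation. The sketch below reproduces the pattern with DifferentiationInterface.jl under the assumption of a toy function `h`; only `prepare_gradient`, `value_and_gradient!`, `Cache`, and `AutoForwardDiff` are the real API, matching their use in the function body further down.

```julia
using DifferentiationInterface
import ForwardDiff

h(z, v) = (v .= 2 .* z; sum(abs2, v))   # writes its scratch cache in-place, like Jfunc!
backend = AutoForwardDiff()
z_last = fill(NaN, 3)                   # NaN seed forces a recompute at the first call
context = (Cache(zeros(3)),)            # scratch storage recycled across calls
prep = prepare_gradient(h, backend, z_last, context...)
Jval, ∇J = zeros(1), zeros(3)
function update!(zarg)
    if any(zarg .!= z_last)             # memoization: skip if arguments are unchanged
        z_last .= zarg
        Jval[1], _ = value_and_gradient!(h, ∇J, prep, backend, z_last, context...)
    end
    return nothing
end
update!((1.0, 2.0, 3.0))                # computes h and its gradient once
update!((1.0, 2.0, 3.0))                # cache hit: nothing recomputed
```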
-function get_optim_functions(
-    estim::MovingHorizonEstimator, ::JuMP.GenericModel{JNT},
-) where {JNT<:Real}
-    # ----------- common cache for Jfunc and gfuncs --------------------------------------
-    model, con = estim.model, estim.con
-    grad, jac = estim.gradient, estim.jacobian
-    nx̂, nym, nŷ, nu, nε, nk = estim.nx̂, estim.nym, model.ny, model.nu, estim.nε, model.nk
-    He = estim.He
-    nV̂, nX̂, ng, nZ̃ = He*nym, He*nx̂, length(con.i_g), length(estim.Z̃)
-    strict = Val(true)
-    myNaN = convert(JNT, NaN)
-    J::Vector{JNT} = zeros(JNT, 1)
-    V̂::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nV̂), zeros(JNT, nX̂)
-    k0::Vector{JNT} = zeros(JNT, nk)
-    û0::Vector{JNT}, ŷ0::Vector{JNT} = zeros(JNT, nu), zeros(JNT, nŷ)
-    g::Vector{JNT} = zeros(JNT, ng)
-    x̄::Vector{JNT} = zeros(JNT, nx̂)
-    # --------------------- objective functions -------------------------------------------
-    function Jfunc!(Z̃, V̂, X̂0, û0, k0, ŷ0, g, x̄)
-        update_prediction!(V̂, X̂0, û0, k0, ŷ0, g, estim, Z̃)
-        return obj_nonlinprog!(x̄, estim, model, V̂, Z̃)
-    end
-    Z̃_∇J = fill(myNaN, nZ̃)  # NaN to force update_predictions! at first call
-    ∇J_context = (
-        Cache(V̂), Cache(X̂0), Cache(û0), Cache(k0), Cache(ŷ0),
-        Cache(g),
-        Cache(x̄),
-    )
-    # temporarily "fill" the estimation window for the preparation of the gradient:
-    estim.Nk[] = He
-    ∇J_prep = prepare_gradient(Jfunc!, grad, Z̃_∇J, ∇J_context...; strict)
-    estim.Nk[] = 0
-    ∇J = Vector{JNT}(undef, nZ̃)
-    function update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
-        if isdifferent(Z̃arg, Z̃_∇J)
-            Z̃_∇J .= Z̃arg
-            J[], _ = value_and_gradient!(Jfunc!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...)
-        end
-    end
-    function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real}
-        update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
-        return J[]::T
-    end
-    ∇J_func! = function (∇Jarg::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
-        # only the multivariate syntax of JuMP.@operator, univariate is impossible for MHE
-        # since Z̃ comprises the arrival state estimate AND the estimated process noise
-        update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
-        return ∇Jarg .= ∇J
-    end
-    # --------------------- inequality constraint functions -------------------------------
-    function gfunc!(g, Z̃, V̂, X̂0, û0, k0, ŷ0)
-        return update_prediction!(V̂, X̂0, û0, k0, ŷ0, g, estim, Z̃)
-    end
-    Z̃_∇g = fill(myNaN, nZ̃)  # NaN to force update_predictions! at first call
-    ∇g_context = (
-        Cache(V̂), Cache(X̂0), Cache(û0), Cache(k0), Cache(ŷ0),
-    )
-    # temporarily enable all the inequality constraints for sparsity detection:
-    estim.con.i_g .= true
-    estim.Nk[] = He
-    ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict)
-    estim.con.i_g .= false
-    estim.Nk[] = 0
-    ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng)
-    function update_con!(g, ∇g, Z̃_∇g, Z̃arg)
-        if isdifferent(Z̃arg, Z̃_∇g)
-            Z̃_∇g .= Z̃arg
-            value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
-        end
-    end
-    g_funcs = Vector{Function}(undef, ng)
-    for i in eachindex(g_funcs)
-        gfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
-            update_con!(g, ∇g, Z̃_∇g, Z̃arg)
-            return g[i]::T
-        end
-        g_funcs[i] = gfunc_i
-    end
-    ∇g_funcs! = Vector{Function}(undef, ng)
-    for i in eachindex(∇g_funcs!)
-        ∇g_funcs![i] = function (∇g_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
-            # only the multivariate syntax of JuMP.@operator, see above for the explanation
-            update_con!(g, ∇g, Z̃_∇g, Z̃arg)
-            return ∇g_i .= @views ∇g[i, :]
-        end
-    end
-    return J_func, ∇J_func!, g_funcs, ∇g_funcs!
-end
-
-"""
-    get_nonlinops(estim::MovingHorizonEstimator, optim) -> g_oracle, J_op
-
-Return the operators for the nonlinear optimization of [`MovingHorizonEstimator`](@ref).
-
-Return `g_oracle`, the inequality [`VectorNonlinearOracle`](@extref MathOptInterface MathOptInterface.VectorNonlinearOracle),
-. Note that `g_oracle` only includes the non-`Inf` inequality constraints, thus
-it must be re-constructed if they change. Also return `J_op`, the [`NonlinearOperator`](@extref JuMP NonlinearOperator)
-for the objective function, based on the splatting syntax. This method is really intricate
-and that's because of 3 elements:
-
-- These functions are used inside the nonlinear optimization, so they must be type-stable
-  and as efficient as possible. All the function outputs and derivatives are cached and
-  updated in-place if required to use the efficient [`value_and_jacobian!`](@extref DifferentiationInterface DifferentiationInterface.value_and_jacobian!).
-- The splatting syntax for objective functions implies the use of `Vararg{T,N}` (see the [performance tip](@extref Julia Be-aware-of-when-Julia-avoids-specializing))
-  and memoization to avoid redundant computations. This is already complex, but it's even
-  worse knowing that the automatic differentiation tools do not support splatting.
-- The signature of gradient and hessian functions is not the same for univariate (`nZ̃ == 1`)
-  and multivariate (`nZ̃ > 1`) operators in `JuMP`. Both must be defined.
-"""
 function get_nonlinops(
-    estim::MovingHorizonEstimator, ::JuMP.GenericModel{JNT}
+    estim::MovingHorizonEstimator, optim::JuMP.GenericModel{JNT}
 ) where JNT<:Real
     # ----------- common cache for Jfunc and gfuncs --------------------------------------
     model, con = estim.model, estim.con
@@ -1503,18 +1391,14 @@ function get_nonlinops(
         update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
         return J[]::T
     end
-    ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
-        function (Z̃_arg)
-            update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
-            return ∇J[]
-        end
-    else # multivariate syntax (see JuMP.@operator doc):
-        function (∇J_arg::AbstractVector{T}, Z̃_arg::Vararg{T, N}) where {N, T<:Real}
-            update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
-            return ∇J_arg .= ∇J
-        end
+    function ∇J_func!(∇J_arg::AbstractVector{T}, Z̃_arg::Vararg{T, N}) where {N, T<:Real}
+        # only the multivariate syntax of JuMP.@operator, univariate is impossible for MHE
+        # since Z̃ comprises the arrival state estimate AND the estimated process noise
+        update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
+        return ∇J_arg .= ∇J
     end
-    g_oracle, J_func, ∇J_func!
+    J_op = JuMP.add_nonlinear_operator(optim, nZ̃, J_func, ∇J_func!, name=:J_op)
+    g_oracle, J_op
 end
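To see why `J_func` and `∇J_func!` above can share one computation, here is a toy memoized pair in the same multivariate `Vararg{T,N}` syntax; the functions and sizes are illustrative assumptions, not package code.

```julia
z_last = fill(NaN, 3)                   # NaN seed forces a first evaluation
Jval, ∇Jval = zeros(1), zeros(3)
function recompute!(zarg)
    if any(zarg .!= z_last)             # same role as isdifferent in the source
        z_last .= zarg
        Jval[1] = sum(abs2, z_last)     # stand-in for the MHE objective
        ∇Jval .= 2 .* z_last            # stand-in for its gradient
    end
    return nothing
end
J_func(zarg::Vararg{T, N}) where {N, T<:Real} = (recompute!(zarg); Jval[1])
function ∇J_func!(g::AbstractVector{T}, zarg::Vararg{T, N}) where {N, T<:Real}
    recompute!(zarg)                    # cache hit if J_func ran with the same args
    return g .= ∇Jval
end
J_func(1.0, 2.0, 3.0)                   # evaluates and caches value + gradient
∇J_func!(zeros(3), 1.0, 2.0, 3.0)       # only copies the cached gradient out
```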
 
 "By default, there is no nonlinear constraint, thus do nothing."