@@ -706,7 +706,12 @@ function setconstraint!(
         JuMP.delete(optim, optim[:linconstraint])
         JuMP.unregister(optim, :linconstraint)
         @constraint(optim, linconstraint, A*Z̃var .≤ b)
-        set_nonlincon!(estim, model, optim)
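+        # Ipopt uses the vector nonlinear oracle, other solvers the scalar operator constraints: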
+        if JuMP.solver_name(optim) ≠ "Ipopt"
+            set_nonlincon!(estim, model, optim)
+        else
+            g_oracle, = get_nonlinops(estim, optim)
+            set_nonlincon_exp!(estim, model, g_oracle)
+        end
     else
         i_b, i_g = init_matconstraint_mhe(model,
             i_x̃min, i_x̃max, i_X̂min, i_X̂max, i_Ŵmin, i_Ŵmax, i_V̂min, i_V̂max
@@ -1278,41 +1283,51 @@ function init_optimization!(
             JuMP.set_attribute(optim, "nlp_scaling_max_gradient", 10.0/C)
         end
     end
-    Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs! = get_optim_functions(estim, optim)
-    @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!)
+    if JuMP.solver_name(optim) ≠ "Ipopt"
+        # everything with the splatting syntax:
+        J_func, ∇J_func!, g_funcs, ∇g_funcs! = get_optim_functions(estim, optim)
+    else
+        # constraints with vector nonlinear oracle, objective function with splatting:
+        g_oracle, J_func, ∇J_func! = get_nonlinops(estim, optim)
+    end
+    @operator(optim, J, nZ̃, J_func, ∇J_func!)
     @objective(optim, Min, J(Z̃var...))
     nV̂, nX̂ = estim.He*estim.nym, estim.He*estim.nx̂
-    if length(con.i_g) ≠ 0
-        i_base = 0
-        for i in eachindex(con.X̂0min)
-            name = Symbol("g_X̂0min_$i")
-            optim[name] = JuMP.add_nonlinear_operator(
-                optim, nZ̃, gfuncs[i_base + i], ∇gfuncs![i_base + i]; name
-            )
-        end
-        i_base = nX̂
-        for i in eachindex(con.X̂0max)
-            name = Symbol("g_X̂0max_$i")
-            optim[name] = JuMP.add_nonlinear_operator(
-                optim, nZ̃, gfuncs[i_base + i], ∇gfuncs![i_base + i]; name
-            )
-        end
-        i_base = 2*nX̂
-        for i in eachindex(con.V̂min)
-            name = Symbol("g_V̂min_$i")
-            optim[name] = JuMP.add_nonlinear_operator(
-                optim, nZ̃, gfuncs[i_base + i], ∇gfuncs![i_base + i]; name
-            )
-        end
-        i_base = 2*nX̂ + nV̂
-        for i in eachindex(con.V̂max)
-            name = Symbol("g_V̂max_$i")
-            optim[name] = JuMP.add_nonlinear_operator(
-                optim, nZ̃, gfuncs[i_base + i], ∇gfuncs![i_base + i]; name
+    if JuMP.solver_name(optim) ≠ "Ipopt"
+        if length(con.i_g) ≠ 0
+            i_base = 0
+            for i in eachindex(con.X̂0min)
+                name = Symbol("g_X̂0min_$i")
+                optim[name] = JuMP.add_nonlinear_operator(
+                    optim, nZ̃, g_funcs[i_base + i], ∇g_funcs![i_base + i]; name
+                )
+            end
+            i_base = nX̂
+            for i in eachindex(con.X̂0max)
+                name = Symbol("g_X̂0max_$i")
+                optim[name] = JuMP.add_nonlinear_operator(
+                    optim, nZ̃, g_funcs[i_base + i], ∇g_funcs![i_base + i]; name
+                )
+            end
+            i_base = 2*nX̂
+            for i in eachindex(con.V̂min)
+                name = Symbol("g_V̂min_$i")
+                optim[name] = JuMP.add_nonlinear_operator(
+                    optim, nZ̃, g_funcs[i_base + i], ∇g_funcs![i_base + i]; name
+                )
+            end
+            i_base = 2*nX̂ + nV̂
+            for i in eachindex(con.V̂max)
+                name = Symbol("g_V̂max_$i")
+                optim[name] = JuMP.add_nonlinear_operator(
+                    optim, nZ̃, g_funcs[i_base + i], ∇g_funcs![i_base + i]; name
+                )
+            end
         end
+        set_nonlincon!(estim, model, optim)
+    else
+        set_nonlincon_exp!(estim, model, g_oracle)
     end
-    set_nonlincon!(estim, model, optim)
     return nothing
 end
 
@@ -1379,11 +1394,11 @@ function get_optim_functions(
             J[], _ = value_and_gradient!(Jfunc!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...)
         end
     end
-    function Jfunc(Z̃arg::Vararg{T, N}) where {N, T<:Real}
+    function J_func(Z̃arg::Vararg{T, N}) where {N, T<:Real}
         update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
         return J[]::T
     end
-    ∇Jfunc! = function (∇Jarg::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
+    ∇J_func! = function (∇Jarg::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
         # only the multivariate syntax of JuMP.@operator, univariate is impossible for MHE
         # since Z̃ comprises the arrival state estimate AND the estimated process noise
         update_objective!(J, ∇J, Z̃_∇J, Z̃arg)
@@ -1410,23 +1425,124 @@ function get_optim_functions(
             value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
         end
     end
-    gfuncs = Vector{Function}(undef, ng)
-    for i in eachindex(gfuncs)
+    g_funcs = Vector{Function}(undef, ng)
+    for i in eachindex(g_funcs)
         gfunc_i = function (Z̃arg::Vararg{T, N}) where {N, T<:Real}
             update_con!(g, ∇g, Z̃_∇g, Z̃arg)
             return g[i]::T
         end
-        gfuncs[i] = gfunc_i
+        g_funcs[i] = gfunc_i
     end
-    ∇gfuncs! = Vector{Function}(undef, ng)
-    for i in eachindex(∇gfuncs!)
-        ∇gfuncs![i] = function (∇g_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
+    ∇g_funcs! = Vector{Function}(undef, ng)
+    for i in eachindex(∇g_funcs!)
+        ∇g_funcs![i] = function (∇g_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
             # only the multivariate syntax of JuMP.@operator, see above for the explanation
             update_con!(g, ∇g, Z̃_∇g, Z̃arg)
             return ∇g_i .= @views ∇g[i, :]
         end
     end
-    return Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!
+    return J_func, ∇J_func!, g_funcs, ∇g_funcs!
+end
+
+# TODO: move the docstring of the method above here and rework it
+function get_nonlinops(
+    estim::MovingHorizonEstimator, ::JuMP.GenericModel{JNT}
+) where JNT<:Real
+    # ----------- common cache for J_func and gi_func! ------------------------------------
+    model, con = estim.model, estim.con
+    grad, jac = estim.gradient, estim.jacobian
+    nx̂, nym, nŷ, nu, nε, nk = estim.nx̂, estim.nym, model.ny, model.nu, estim.nε, model.nk
+    He = estim.He
+    i_g = findall(con.i_g) # convert to non-logical indices for non-allocating @views
+    ng, ngi = length(con.i_g), sum(con.i_g)
+    nV̂, nX̂, nZ̃ = He*nym, He*nx̂, length(estim.Z̃)
+    strict = Val(true)
+    myNaN, myInf = convert(JNT, NaN), convert(JNT, Inf)
+    J::Vector{JNT} = zeros(JNT, 1)
+    V̂::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nV̂), zeros(JNT, nX̂)
+    k0::Vector{JNT} = zeros(JNT, nk)
+    û0::Vector{JNT}, ŷ0::Vector{JNT} = zeros(JNT, nu), zeros(JNT, nŷ)
+    g::Vector{JNT}, gi::Vector{JNT} = zeros(JNT, ng), zeros(JNT, ngi)
+    x̄::Vector{JNT} = zeros(JNT, nx̂)
+    # -------------- inequality constraint: nonlinear oracle -----------------------------
+    function gi!(gi, Z̃, V̂, X̂0, û0, k0, ŷ0, g)
+        update_prediction!(V̂, X̂0, û0, k0, ŷ0, g, estim, Z̃)
+        gi .= @views g[i_g]
+        return nothing
+    end
+    Z̃_∇gi = fill(myNaN, nZ̃) # NaN to force update_prediction! at first call
+    ∇gi_context = (
+        Cache(V̂), Cache(X̂0), Cache(û0), Cache(k0), Cache(ŷ0), Cache(g)
+    )
+    # temporarily "fill" the estimation window for the preparation of the Jacobian:
+    estim.Nk[] = He
+    ∇gi_prep = prepare_jacobian(gi!, gi, jac, Z̃_∇gi, ∇gi_context...; strict)
+    estim.Nk[] = 0
+    ∇gi = init_diffmat(JNT, jac, ∇gi_prep, nZ̃, ngi)
+    function update_con!(gi, ∇gi, Z̃_∇gi, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇gi)
+            Z̃_∇gi .= Z̃_arg
+            value_and_jacobian!(gi!, gi, ∇gi, ∇gi_prep, jac, Z̃_∇gi, ∇gi_context...)
+        end
+        return nothing
+    end
+    function gi_func!(gi_vec, Z̃_arg)
+        update_con!(gi, ∇gi, Z̃_∇gi, Z̃_arg)
+        return gi_vec .= gi
+    end
+    function ∇gi_func!(∇gi_vec, Z̃_arg)
+        update_con!(gi, ∇gi, Z̃_∇gi, Z̃_arg)
+        return diffmat2vec!(∇gi_vec, ∇gi)
+    end
+    gi_min = fill(-myInf, ngi)
+    gi_max = zeros(JNT, ngi)
+    ∇gi_structure = init_diffstructure(∇gi)
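+    # expose all retained inequality constraints to Ipopt as a single vector-valued
+    # constraint, bounded by gi_min ≤ gi(Z̃) ≤ gi_max, with its Jacobian sparsity pattern: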
+    g_oracle = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = gi_min,
+        u = gi_max,
+        eval_f = gi_func!,
+        jacobian_structure = ∇gi_structure,
+        eval_jacobian = ∇gi_func!
+    )
+    # ------------- objective function: splatting syntax ---------------------------------
+    function J!(Z̃, V̂, X̂0, û0, k0, ŷ0, g, x̄)
+        update_prediction!(V̂, X̂0, û0, k0, ŷ0, g, estim, Z̃)
+        return obj_nonlinprog!(x̄, estim, model, V̂, Z̃)
+    end
+    Z̃_∇J = fill(myNaN, nZ̃) # NaN to force update_prediction! at first call
+    ∇J_context = (
+        Cache(V̂), Cache(X̂0), Cache(û0), Cache(k0), Cache(ŷ0),
+        Cache(g),
+        Cache(x̄),
+    )
+    # temporarily "fill" the estimation window for the preparation of the gradient:
+    estim.Nk[] = He
+    ∇J_prep = prepare_gradient(J!, grad, Z̃_∇J, ∇J_context...; strict)
+    estim.Nk[] = 0
+    ∇J = Vector{JNT}(undef, nZ̃)
+    function update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇J)
+            Z̃_∇J .= Z̃_arg
+            J[], _ = value_and_gradient!(J!, ∇J, ∇J_prep, grad, Z̃_∇J, ∇J_context...)
+        end
+    end
+    function J_func(Z̃_arg::Vararg{T, N}) where {N, T<:Real}
+        update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
+        return J[]::T
+    end
+    ∇J_func! = if nZ̃ == 1 # univariate syntax (see JuMP.@operator doc):
+        function (Z̃_arg)
+            update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
+            return ∇J[]
+        end
+    else # multivariate syntax (see JuMP.@operator doc):
+        function (∇J_arg::AbstractVector{T}, Z̃_arg::Vararg{T, N}) where {N, T<:Real}
+            update_objective!(J, ∇J, Z̃_∇J, Z̃_arg)
+            return ∇J_arg .= ∇J
+        end
+    end
+    return g_oracle, J_func, ∇J_func!
 end
 
 "By default, no nonlinear constraints in the MHE, thus return nothing."
@@ -1457,4 +1573,23 @@ function set_nonlincon!(
         JuMP.@constraint(optim, gfunc_i(Z̃var...) <= 0)
     end
     return nothing
+end
+
+"By default, there is no nonlinear constraint, thus do nothing."
+set_nonlincon_exp!(::MovingHorizonEstimator, ::SimModel, _ ) = nothing
+
+"""
+    set_nonlincon_exp!(estim::MovingHorizonEstimator, ::NonLinModel, g_oracle)
+
+Set the nonlinear inequality constraints for `NonLinModel` with the vector nonlinear oracle `g_oracle`, if any.
+"""
+function set_nonlincon_exp!(estim::MovingHorizonEstimator, ::NonLinModel, g_oracle)
+    optim = estim.optim
+    Z̃var = optim[:Z̃var]
+    nonlin_constraints = JuMP.all_constraints(
+        optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle
+    )
+    map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints)
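+    # add the oracle constraint only if at least one nonlinear inequality is activated: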
+    any(estim.con.i_g) && @constraint(optim, Z̃var in g_oracle)
+    return nothing
 end