You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
@@ -1201,6 +1201,8 @@ function linearize(sys, lin_fun; t = 0.0, op = Dict(), allow_input_derivatives =
1201
1201
nz =size(f_z, 2)
1202
1202
ny =size(h_x, 1)
1203
1203
1204
+
D = h_u
1205
+
1204
1206
ifisempty(g_z)
1205
1207
A = f_x
1206
1208
B = f_u
@@ -1216,20 +1218,20 @@ function linearize(sys, lin_fun; t = 0.0, op = Dict(), allow_input_derivatives =
1216
1218
A = [f_x f_z
1217
1219
gzgx*f_x gzgx*f_z]
1218
1220
B = [f_u
1219
-
zeros(nz, nu)]
1221
+
gzgx * f_u] # The cited paper has zeros in the bottom block, see derivation in https://github.com/SciML/ModelingToolkit.jl/pull/1691 for the correct formula
1222
+
1220
1223
C = [h_x h_z]
1221
1224
Bs =-(gz \ g_u) # This equation differs from the cited paper; the paper is likely wrong since their equation leads to a dimension mismatch.
1222
1225
if!iszero(Bs)
1223
1226
if!allow_input_derivatives
1224
1227
der_inds =findall(vec(any(!=(0), Bs, dims =1)))
1225
-
error("Input derivatives appeared in expressions (-g_z\\g_u != 0), the following inputs appeared differentiated: $(inputs(sys)[der_inds]). Call `linear_staespace` with keyword argument `allow_input_derivatives = true` to allow this and have the returned `B` matrix be of double width ($(2nu)), where the last $nu inputs are the derivatives of the first $nu inputs.")
1228
+
error("Input derivatives appeared in expressions (-g_z\\g_u != 0), the following inputs appeared differentiated: $(inputs(sys)[der_inds]). Call `linear_statespace` with keyword argument `allow_input_derivatives = true` to allow this and have the returned `B` matrix be of double width ($(2nu)), where the last $nu inputs are the derivatives of the first $nu inputs.")
0 commit comments