@@ -48,13 +48,17 @@ struct MovingHorizonEstimator{
    NT<:Real,
    SM<:SimModel,
    JM<:JuMP.GenericModel,
+   GB<:AbstractADType,
+   JB<:AbstractADType,
    CE<:StateEstimator,
} <: StateEstimator{NT}
    model::SM
    # note: `NT` and the number type `JNT` in `JuMP.GenericModel{JNT}` can be
    # different since solvers that support non-Float64 are scarce.
    optim::JM
    con::EstimatorConstraint{NT}
+   gradient::GB
+   jacobian::JB
    covestim::CE
    Z̃::Vector{NT}
    lastu0::Vector{NT}
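Storing the backends behind dedicated type parameters (`GB`, `JB`) rather than as abstractly typed fields keeps `estim.gradient` and `estim.jacobian` accesses type-stable. A minimal sketch of the same pattern outside this package (the struct and names below are illustrative, not from the source):

```julia
using ADTypes  # defines AbstractADType and the AutoX backend descriptors

# Parametrizing on the concrete backend types means field accesses are
# concretely inferred, so no dynamic dispatch when the backend is used:
struct ToyEstimator{GB<:AbstractADType, JB<:AbstractADType}
    gradient::GB
    jacobian::JB
end

est = ToyEstimator(AutoForwardDiff(), AutoForwardDiff())
typeof(est.gradient)  # concrete backend type, known at compile time
```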
@@ -112,9 +116,16 @@ struct MovingHorizonEstimator{
    function MovingHorizonEstimator{NT}(
        model::SM,
        He, i_ym, nint_u, nint_ym, P̂_0, Q̂, R̂, Cwt,
-       optim::JM, gradient, jacobian, covestim::CE;
+       optim::JM, gradient::GB, jacobian::JB, covestim::CE;
        direct=true
-   ) where {NT<:Real, SM<:SimModel{NT}, JM<:JuMP.GenericModel, CE<:StateEstimator{NT}}
+   ) where {
+       NT<:Real,
+       SM<:SimModel{NT},
+       JM<:JuMP.GenericModel,
+       GB<:AbstractADType,
+       JB<:AbstractADType,
+       CE<:StateEstimator{NT}
+   }
        nu, ny, nd = model.nu, model.ny, model.nd
        He < 1  && throw(ArgumentError("Estimation horizon He should be ≥ 1"))
        Cwt < 0 && throw(ArgumentError("Cwt weight should be ≥ 0"))
@@ -158,8 +169,10 @@ struct MovingHorizonEstimator{
        P̂arr_old = copy(P̂_0)
        Nk = [0]
        corrected = [false]
-       estim = new{NT, SM, JM, CE}(
-           model, optim, con, covestim,
+       estim = new{NT, SM, JM, GB, JB, CE}(
+           model, optim, con,
+           gradient, jacobian,
+           covestim,
            Z̃, lastu0, x̂op, f̂op, x̂0,
            He, nϵ,
            i_ym, nx̂, nym, nyu, nxs,
@@ -173,7 +186,7 @@ struct MovingHorizonEstimator{
            direct, corrected,
            buffer
        )
-       init_optimization!(estim, model, optim, gradient, jacobian)
+       init_optimization!(estim, model, optim)
        return estim
    end
end
@@ -261,9 +274,9 @@ transcription for now.
  nonlinear optimizer for solving (default to [`Ipopt`](https://github.com/jump-dev/Ipopt.jl),
  or [`OSQP`](https://osqp.org/docs/parsers/jump.html) if `model` is a [`LinModel`](@ref)).
- `gradient=AutoForwardDiff()` : an `AbstractADType` backend for the gradient of the objective
-  function if `model` is not a [`LinModel`](@ref), see [`DifferentiationInterface` doc](@extref DifferentiationInterface List).
+  function when `model` is not a [`LinModel`](@ref), see [`DifferentiationInterface` doc](@extref DifferentiationInterface List).
- `jacobian=AutoForwardDiff()` : an `AbstractADType` backend for the Jacobian of the
-  constraints if `model` is not a [`LinModel`](@ref), see `gradient` above for the options.
+  constraints when `model` is not a [`LinModel`](@ref), see `gradient` above for the options.
- `direct=true`: construct with a direct transmission from ``\mathbf{y^m}`` (a.k.a. current
  estimator, in opposition to the delayed/predictor form).
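These two keywords let callers swap differentiation backends at construction time. A hedged usage sketch (the toy discrete-time model below is illustrative; `AutoFiniteDiff` needs FiniteDiff.jl loaded and `AutoForwardDiff` needs ForwardDiff.jl):

```julia
using ModelPredictiveControl, ADTypes
import FiniteDiff, ForwardDiff

f(x, u, _, _) = 0.1x + u   # toy state update
h(x, _, _) = 2x            # toy measured output
model = NonLinModel(f, h, 10.0, 1, 1, 1, solver=nothing)  # discrete-time

estim = MovingHorizonEstimator(model;
    He=5,                        # estimation horizon (mandatory)
    gradient=AutoFiniteDiff(),   # backend for the objective gradient
    jacobian=AutoForwardDiff(),  # backend for the constraint Jacobian
)
```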
@@ -1197,22 +1210,18 @@ end

"""
    init_optimization!(
-       estim::MovingHorizonEstimator, model::SimModel, optim::JuMP.GenericModel, _ , _
+       estim::MovingHorizonEstimator, model::LinModel, optim::JuMP.GenericModel
    )

Init the quadratic optimization of [`MovingHorizonEstimator`](@ref).
"""
function init_optimization!(
-   estim::MovingHorizonEstimator,
-   ::LinModel,
-   optim::JuMP.GenericModel,
-   ::AbstractADType,
-   ::AbstractADType
+   estim::MovingHorizonEstimator, model::LinModel, optim::JuMP.GenericModel,
)
    nZ̃ = length(estim.Z̃)
    JuMP.num_variables(optim) == 0 || JuMP.empty!(optim)
    JuMP.set_silent(optim)
-   limit_solve_time(estim.optim, estim.model.Ts)
+   limit_solve_time(optim, model.Ts)
    @variable(optim, Z̃var[1:nZ̃])
    A = estim.con.A[estim.con.i_b, :]
    b = estim.con.b[estim.con.i_b]
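The two `limit_solve_time` call sites now use the local `optim` and `model` arguments instead of reaching through `estim`, which is both clearer and consistent with the slimmed-down signatures. For context, a hypothetical helper in the same spirit, built on JuMP's real `set_time_limit_sec` (the 10×`Ts` factor and the fallback are assumptions, not the package's actual implementation):

```julia
using JuMP

# hypothetical: cap solver wall-clock time relative to the sampling period Ts
function my_limit_solve_time(optim::JuMP.GenericModel, Ts)
    try
        JuMP.set_time_limit_sec(optim, 10Ts)
    catch
        @warn "solver does not support a time limit" maxlog=1
    end
end
```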
@@ -1223,28 +1232,20 @@ end

"""
    init_optimization!(
-       estim::MovingHorizonEstimator,
-       model::SimModel,
-       optim::JuMP.GenericModel,
-       gradient::AbstractADType,
-       jacobian::AbstractADType
+       estim::MovingHorizonEstimator, model::SimModel, optim::JuMP.GenericModel,
    ) -> nothing

Init the nonlinear optimization of [`MovingHorizonEstimator`](@ref).
"""
function init_optimization!(
-   estim::MovingHorizonEstimator,
-   model::SimModel,
-   optim::JuMP.GenericModel{JNT},
-   gradient::AbstractADType,
-   jacobian::AbstractADType
+   estim::MovingHorizonEstimator, model::SimModel, optim::JuMP.GenericModel{JNT}
) where JNT<:Real
    C, con = estim.C, estim.con
    nZ̃ = length(estim.Z̃)
    # --- variables and linear constraints ---
    JuMP.num_variables(optim) == 0 || JuMP.empty!(optim)
    JuMP.set_silent(optim)
-   limit_solve_time(estim.optim, estim.model.Ts)
+   limit_solve_time(optim, model.Ts)
    @variable(optim, Z̃var[1:nZ̃])
    A = estim.con.A[con.i_b, :]
    b = estim.con.b[con.i_b]
@@ -1258,9 +1259,7 @@ function init_optimization!(
            JuMP.set_attribute(optim, "nlp_scaling_max_gradient", 10.0/C)
        end
    end
-   Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs! = get_optim_functions(
-       estim, optim, gradient, jacobian
-   )
+   Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs! = get_optim_functions(estim, optim)
    @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!)
    @objective(optim, Min, J(Z̃var...))
    nV̂, nX̂ = estim.He*estim.nym, estim.He*estim.nx̂
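For context on the `@operator`/`@objective` pair just above: JuMP registers `Jfunc` together with its in-place gradient `∇Jfunc!` as a user-defined scalar operator over `nZ̃` splatted arguments. A standalone sketch of that JuMP mechanism with a toy objective (not the MHE cost):

```julia
using JuMP, Ipopt

model = Model(Ipopt.Optimizer)
@variable(model, x[1:2])

f(x...) = (x[1] - 1)^2 + (x[2] - 2)^2               # splatted-scalar objective
function ∇f(g::AbstractVector{T}, x::T...) where T  # in-place gradient, JuMP's
    g[1] = 2(x[1] - 1)                              # multivariate operator syntax
    g[2] = 2(x[2] - 2)
    return nothing
end

@operator(model, op_f, 2, f, ∇f)   # 2 = number of scalar arguments
@objective(model, Min, op_f(x...))
optimize!(model)
```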
@@ -1301,10 +1300,7 @@ end

"""
    get_optim_functions(
-        estim::MovingHorizonEstimator,
-        optim::JuMP.GenericModel,
-        grad_backend::AbstractADType,
-        jac_backend::AbstractADType
+        estim::MovingHorizonEstimator, optim::JuMP.GenericModel,
    ) -> Jfunc, ∇Jfunc!, gfuncs, ∇gfuncs!

Return the functions for the nonlinear optimization of [`MovingHorizonEstimator`](@ref).
@@ -1327,10 +1323,7 @@ This method is really intricate and I'm not proud of it. That's because of 3 ele
Inspired from: [User-defined operators with vector outputs](@extref JuMP User-defined-operators-with-vector-outputs)
"""
function get_optim_functions(
-   estim::MovingHorizonEstimator,
-   optim::JuMP.GenericModel{JNT},
-   grad_backend::AbstractADType,
-   jac_backend::AbstractADType
+   estim::MovingHorizonEstimator, ::JuMP.GenericModel{JNT},
) where {JNT<:Real}
    # ---------- common cache for Jfunc, gfuncs called with floats ------------------------
    model, con = estim.model, estim.con
@@ -1363,13 +1356,13 @@ function get_optim_functions(
        Cache(g),
        Cache(x̄),
    )
-   ∇J_prep = prepare_gradient(Jfunc!, grad_backend, Z̃_∇J, ∇J_context...; strict)
+   ∇J_prep = prepare_gradient(Jfunc!, estim.gradient, Z̃_∇J, ∇J_context...; strict)
    ∇J = Vector{JNT}(undef, nZ̃)
    ∇Jfunc! = function (∇J::AbstractVector{T}, Z̃arg::Vararg{T, N}) where {N, T<:Real}
        # only the multivariate syntax of JuMP.@operator, univariate is impossible for MHE
        # since Z̃ comprises the arrival state estimate AND the estimated process noise
        Z̃_∇J .= Z̃arg
-       gradient!(Jfunc!, ∇J, ∇J_prep, grad_backend, Z̃_∇J, ∇J_context...)
+       gradient!(Jfunc!, ∇J, ∇J_prep, estim.gradient, Z̃_∇J, ∇J_context...)
        return ∇J
    end
@@ -1397,17 +1390,17 @@ function get_optim_functions(
    # temporarily enable all the inequality constraints for sparsity detection:
    estim.con.i_g .= true
    estim.Nk[] = He
-   ∇g_prep = prepare_jacobian(gfunc!, g, jac_backend, Z̃_∇g, ∇g_context...; strict)
+   ∇g_prep = prepare_jacobian(gfunc!, g, estim.jacobian, Z̃_∇g, ∇g_context...; strict)
    estim.con.i_g .= false
    estim.Nk[] = 0
-   ∇g = init_diffmat(JNT, jac_backend, ∇g_prep, nZ̃, ng)
+   ∇g = init_diffmat(JNT, estim.jacobian, ∇g_prep, nZ̃, ng)
    ∇gfuncs! = Vector{Function}(undef, ng)
    for i in eachindex(∇gfuncs!)
        ∇gfuncs![i] = function (∇g_i, Z̃arg::Vararg{T, N}) where {N, T<:Real}
            # only the multivariate syntax of JuMP.@operator, see above for the explanation
            if isdifferent(Z̃arg, Z̃_∇g)
                Z̃_∇g .= Z̃arg
-               jacobian!(gfunc!, g, ∇g, ∇g_prep, jac_backend, Z̃_∇g, ∇g_context...)
+               jacobian!(gfunc!, g, ∇g, ∇g_prep, estim.jacobian, Z̃_∇g, ∇g_context...)
            end
            return ∇g_i .= @views ∇g[i, :]
        end
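Same substitution on the Jacobian side. Note why `i_g .= true` precedes `prepare_jacobian`: sparse backends detect the sparsity pattern during preparation, so every inequality constraint must be active at that point or the detected pattern would be too small. A self-contained sketch of DI's in-place Jacobian API with a toy constraint function (not the MHE constraints):

```julia
using DifferentiationInterface  # re-exports AutoForwardDiff from ADTypes
import ForwardDiff

# in-place constraint function, the two-argument form DI expects
g!(g, x) = (g .= x .^ 2; nothing)

backend = AutoForwardDiff()
x = ones(3)
g = zeros(3)
prep = prepare_jacobian(g!, g, backend, x)  # analysis done once
∇g = zeros(3, 3)
jacobian!(g!, g, ∇g, prep, backend, x)      # reuses the preparation
```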