
Commit cec29d2

added: using strict=Val(true) in DI.jl preparation
1 parent 0e2b855 commit cec29d2

3 files changed: +8 −6 lines

3 files changed

+8
-6
lines changed
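The commit forwards a strict keyword to the DifferentiationInterface.jl (DI.jl) preparation calls in the diffs below. As a minimal, hypothetical sketch (not taken from this repository; the function f, the backend, and the input values are illustrative only), strict preparation of a gradient looks roughly like this:

using DifferentiationInterface   # assumed >= 0.6.45, per the compat bump below
import ForwardDiff

f(x) = sum(abs2, x)              # toy objective, stand-in for a cost function like Jfunc!
backend = AutoForwardDiff()
x = ones(3)

# Prepare the gradient once with strict mode enabled, then reuse the
# preparation result for repeated evaluations with the same function and types.
prep = prepare_gradient(f, backend, x; strict=Val(true))
∇f = gradient(f, prep, backend, x)

With strict=Val(true), reusing prep with a different function, backend, or argument types is expected to raise an error instead of proceeding silently; wrapping the flag in Val encodes the choice at the type level. This keyword is what motivates raising the DifferentiationInterface compat bound to 0.6.45 below.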

Project.toml

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35"
 
 [compat]
 ControlSystemsBase = "1.9"
-DifferentiationInterface = "0.6.44"
+DifferentiationInterface = "0.6.45"
 ForwardDiff = "0.10"
 Ipopt = "1"
 JuMP = "1.21"

src/controller/nonlinmpc.jl

Lines changed: 4 additions & 3 deletions
@@ -608,6 +608,7 @@ function get_optim_functions(
 ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
 nZ̃, nU, nŶ, nX̂ = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂
 nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny
+strict = Val(true)
 myNaN = convert(JNT, NaN) # NaN to force update_simulations! at first call:
 Z̃::Vector{JNT} = fill(myNaN, nZ̃)
 ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ)
@@ -636,7 +637,7 @@
 Cache(Û0), Cache(X̂0),
 Cache(gc), Cache(g), Cache(geq),
 )
-∇J_prep = prepare_gradient(Jfunc!, grad_backend, Z̃_∇J, ∇J_context...)
+∇J_prep = prepare_gradient(Jfunc!, grad_backend, Z̃_∇J, ∇J_context...; strict)
 ∇J = Vector{JNT}(undef, nZ̃)
 ∇Jfunc! = if nZ̃ == 1
 function (Z̃arg)
@@ -675,7 +676,7 @@
 )
 # temporarily enable all the inequality constraints for sparsity detection:
 mpc.con.i_g[1:end-nc] .= true
-∇g_prep = prepare_jacobian(gfunc!, g, jac_backend, Z̃_∇g, ∇g_context...)
+∇g_prep = prepare_jacobian(gfunc!, g, jac_backend, Z̃_∇g, ∇g_context...; strict)
 mpc.con.i_g[1:end-nc] .= false
 ∇g = init_diffmat(JNT, jac_backend, ∇g_prep, nZ̃, ng)
 ∇gfuncs! = Vector{Function}(undef, ng)
@@ -721,7 +722,7 @@
 Cache(Û0), Cache(X̂0),
 Cache(gc), Cache(g)
 )
-∇geq_prep = prepare_jacobian(geqfunc!, geq, jac_backend, Z̃_∇geq, ∇geq_context...)
+∇geq_prep = prepare_jacobian(geqfunc!, geq, jac_backend, Z̃_∇geq, ∇geq_context...; strict)
 ∇geq = init_diffmat(JNT, jac_backend, ∇geq_prep, nZ̃, neq)
 ∇geqfuncs! = Vector{Function}(undef, neq)
 for i in eachindex(∇geqfuncs!)
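For reference, a hypothetical sketch of the in-place Jacobian preparation pattern used above, with a Cache context and the same strict keyword; the residual g!, the buffer, and the sizes are made up for illustration and are not functions from this package:

using DifferentiationInterface
import ForwardDiff

# Toy in-place constraint function: the trailing argument is scratch space
# that DI.jl re-creates with the appropriate element type (e.g. dual numbers).
function g!(g, x, buffer)
    buffer .= 2 .* x          # intermediate result written into the cached buffer
    g .= buffer .- sum(x)     # constraint values written in place
    return nothing
end

backend = AutoForwardDiff()
x, g = ones(4), zeros(4)
context = (Cache(zeros(4)),)  # cached workspace, analogous to Cache(Û0), Cache(g), ...

strict = Val(true)            # same keyword forwarded in the diff above
∇g_prep = prepare_jacobian(g!, g, backend, x, context...; strict)
∇g = jacobian(g!, g, ∇g_prep, backend, x, context...)

Note that the diff also temporarily enables all the inequality constraints before preparing the Jacobian, so that the sparsity pattern detected at preparation time covers every constraint that could later become active, then disables them again afterwards.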

src/estimator/mhe/construct.jl

Lines changed: 3 additions & 2 deletions
@@ -1345,6 +1345,7 @@ function get_optim_functions(
 nx̂, nym, nŷ, nu, nϵ, He = estim.nx̂, estim.nym, model.ny, model.nu, estim.nϵ, estim.He
 nV̂, nX̂, ng, nZ̃ = He*nym, He*nx̂, length(con.i_g), length(estim.Z̃)
 myNaN = convert(JNT, NaN) # NaN to force update_simulations! at first call
+strict = Val(true)
 Z̃::Vector{JNT} = fill(myNaN, nZ̃)
 V̂::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nV̂), zeros(JNT, nX̂)
 û0::Vector{JNT}, ŷ0::Vector{JNT} = zeros(JNT, nu), zeros(JNT, nŷ)
@@ -1370,7 +1371,7 @@
 Cache(g),
 Cache(x̄),
 )
-∇J_prep = prepare_gradient(Jfunc!, grad_backend, Z̃_∇J, ∇J_context...)
+∇J_prep = prepare_gradient(Jfunc!, grad_backend, Z̃_∇J, ∇J_context...; strict)
 ∇J = Vector{JNT}(undef, nZ̃)
 ∇Jfunc! = if nZ̃ == 1
 function (Z̃arg)
@@ -1409,7 +1410,7 @@
 # temporarily enable all the inequality constraints for sparsity detection:
 estim.con.i_g .= true
 estim.Nk[] = He
-∇g_prep = prepare_jacobian(gfunc!, g, jac_backend, Z̃_∇g, ∇g_context...)
+∇g_prep = prepare_jacobian(gfunc!, g, jac_backend, Z̃_∇g, ∇g_context...; strict)
 estim.con.i_g .= false
 estim.Nk[] = 0
 ∇g = init_diffmat(JNT, jac_backend, ∇g_prep, nZ̃, ng)
