@@ -110,8 +110,8 @@ sys = structural_simplify(ude_sys)
110
110
We now set up the loss function and the optimization loop.
111
111
112
112
``` @example friction
113
- function loss(x, (prob, sol_ref, get_vars, get_refs))
114
- new_p = SciMLStructures.replace(Tunable(), prob.p , x)
113
+ function loss(x, (prob, sol_ref, get_vars, get_refs, set_x))
114
+ new_p = set_x(prob, x)
115
115
new_prob = remake(prob, p = new_p, u0 = eltype(x).(prob.u0))
116
116
ts = sol_ref.t
117
117
new_sol = solve(new_prob, Rodas4(), saveat = ts, abstol = 1e-8, reltol = 1e-8)
@@ -131,14 +131,15 @@ of = OptimizationFunction{true}(loss, AutoForwardDiff())
131
131
prob = ODEProblem(sys, [], (0, 0.1), [])
132
132
get_vars = getu(sys, [sys.friction.y])
133
133
get_refs = getu(model_true, [model_true.y])
134
- x0 = reduce(vcat, getindex.((default_values(sys),), tunable_parameters(sys)))
134
+ set_x = setp_oop(sys, sys.nn.p)
135
+ x0 = default_values(sys)[sys.nn.p]
135
136
136
137
cb = (opt_state, loss) -> begin
137
138
@info "step $(opt_state.iter), loss: $loss"
138
139
return false
139
140
end
140
141
141
- op = OptimizationProblem(of, x0, (prob, sol_ref, get_vars, get_refs))
142
+ op = OptimizationProblem(of, x0, (prob, sol_ref, get_vars, get_refs, set_x))
142
143
res = solve(op, Adam(5e-3); maxiters = 10000, callback = cb)
143
144
```
144
145
@@ -147,7 +148,7 @@ res = solve(op, Adam(5e-3); maxiters = 10000, callback = cb)
147
148
We now have a trained neural network! We can check whether running the simulation of the model embedded with the neural network matches the data or not.
148
149
149
150
``` @example friction
150
- res_p = SciMLStructures.replace(Tunable(), prob.p , res.u)
151
+ res_p = set_x(prob, res.u)
151
152
res_prob = remake(prob, p = res_p)
152
153
res_sol = solve(res_prob, Rodas4(), saveat = sol_ref.t)
153
154
@test first.(sol_ref.u)≈first.(res_sol.u) rtol=1e-3 #hide
0 commit comments