@@ -2,7 +2,7 @@
 
 ```@example
 using SciMLSensitivity
-using DifferentialEquations, Flux, Random, Plots
+using DifferentialEquations, Flux, Random, Plots, MLUtils
 using IterTools: ncycle
 
 rng = Random.default_rng()
@@ -46,7 +46,7 @@ ode_data = Array(solve(true_prob, Tsit5(), saveat = t))
 prob = ODEProblem{false}(dudt_, u0, tspan, θ)
 
 k = 10
-train_loader = Flux.Data.DataLoader((ode_data, t), batchsize = k)
+train_loader = DataLoader((ode_data, t), batchsize = k)
 
 for (x, y) in train_loader
     @show x
@@ -96,7 +96,7 @@ When training a neural network, we need to find the gradient with respect to our
 For this example, we will use a very simple ordinary differential equation, Newton's law of cooling. We can represent this in Julia like so.
 
 ```@example minibatch
-using SciMLSensitivity
+using SciMLSensitivity, MLUtils
 using DifferentialEquations, Flux, Random, Plots
 using IterTools: ncycle
 
@@ -152,7 +152,7 @@ ode_data = Array(solve(true_prob, Tsit5(), saveat = t))
 prob = ODEProblem{false}(dudt_, u0, tspan, θ)
 
 k = 10
-train_loader = Flux.Data.DataLoader((ode_data, t), batchsize = k)
+train_loader = DataLoader((ode_data, t), batchsize = k)
 
 for (x, y) in train_loader
     @show x
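
For reference, `Flux.Data.DataLoader` was deprecated in favor of the standalone `MLUtils.DataLoader`, which is what this change migrates the docs to. Below is a minimal, self-contained sketch of the batching pattern the updated tutorial relies on; the toy `ode_data` array and time grid here are stand-ins for the tutorial's actual ODE solution, not its real values.

```julia
# Minimal sketch of the MLUtils.DataLoader pattern used in the updated docs.
# The arrays below are toy stand-ins for the tutorial's `ode_data` and `t`.
using MLUtils

ode_data = rand(Float32, 2, 30)              # 2 state variables at 30 time points
t = collect(range(0.0f0, 1.5f0, length = 30))

k = 10
train_loader = DataLoader((ode_data, t), batchsize = k)

# Each iteration yields a (data, time) pair of k observations.
for (x, y) in train_loader
    @show size(x)  # (2, 10)
    @show size(y)  # (10,)
end
```

Passing a tuple to `DataLoader` batches all of its arrays along their last dimension in lockstep, which is why the data matrix and the time vector stay aligned within each minibatch.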
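
The hunk context above also mentions Newton's law of cooling, du/dt = a(T_env − u), the test equation for the minibatch tutorial. As a rough sketch of what such a setup looks like (the function name `cooling!` and the parameter values are illustrative, not the tutorial's actual code):

```julia
# Illustrative sketch of Newton's law of cooling as an ODEProblem:
# du/dt = a * (T_env - u). Parameter values here are made up.
using DifferentialEquations, Plots

function cooling!(du, u, p, t)
    T_env, a = p
    du[1] = a * (T_env - u[1])
end

u0 = [90.0]         # initial temperature
p = [20.0, 0.5]     # ambient temperature, cooling rate
tspan = (0.0, 10.0)

prob = ODEProblem(cooling!, u0, tspan, p)
sol = solve(prob, Tsit5())
plot(sol)
```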