|
| 1 | +using Flowfusion, ForwardBackward, Flux, RandomFeatureMaps, Optimisers, Plots |
| 2 | + |
| 3 | +#Set up a Flux model: X̂1 = model(t,Xt) |
# Wrapper holding the model's layers as a NamedTuple; parametric so the
# field is concretely typed (no abstract-field boxing).
struct FModel{A}
    layers::A
end
Flux.@layer FModel  # register FModel with Flux so its parameters are trainable
"""
    FModel(; embeddim = 128, spacedim = 2, layers = 3)

Build an `FModel`: Fourier-feature embeddings of time and state, `layers`
residual feed-forward blocks of width `embeddim`, and a linear decoder back
to `spacedim` dimensions.
"""
function FModel(; embeddim = 128, spacedim = 2, layers = 3)
    # Time is a scalar per sample, so its Fourier features take a 1-dim input.
    embed_time = Chain(RandomFourierFeatures(1 => embeddim, 1f0), Dense(embeddim => embeddim, swish))
    # Use `spacedim` here (was a hard-coded 2) so the model actually honors
    # the `spacedim` keyword; the default of 2 preserves existing behavior.
    embed_state = Chain(RandomFourierFeatures(spacedim => embeddim, 1f0), Dense(embeddim => embeddim, swish))
    ffs = [Dense(embeddim => embeddim, swish) for _ in 1:layers]
    decode = Dense(embeddim => spacedim)
    # Construct the NamedTuple inline rather than rebinding the `layers` kwarg
    # (which would change its type from Int to NamedTuple mid-function).
    return FModel((; embed_time, embed_state, ffs, decode))
end
| 16 | + |
# Forward pass: X̂1 = model(t, Xt).
# Assumes `Xt` is a state whose `tensor` is (spacedim, batch) and `t` is a
# per-sample time vector — TODO confirm against Flowfusion's `bridge`/`gen`.
function (f::FModel)(t, Xt)
    l = f.layers
    tXt = tensor(Xt)
    # Broadcast t into a (1, batch) row matching tXt's eltype so it can be
    # embedded like any other feature.
    tv = zero(tXt[1:1,:]) .+ expand(t, ndims(tXt))
    x = l.embed_time(tv) .+ l.embed_state(tXt)
    for ff in l.ffs
        x = x .+ ff(x)  # residual feed-forward block
    end
    # Predict X1 as Xt plus a decoded correction whose scale shrinks as t → 1;
    # the 1.05 offset keeps the scale nonzero at t = 1 — NOTE(review): presumed
    # intent, verify against the training objective.
    tXt .+ l.decode(x) .* (1.05f0 .- expand(t, ndims(tXt)))
end
| 27 | + |
#Distributions for training:
T = Float32  # element type used throughout (Float32 for Flux friendliness)
# X0: uniform on the square [2,3]^2, i.e. offset away from the target.
sampleX0(n_samples) = rand(T, 2, n_samples) .+ 2
# X1: target distribution — presumably a cat-shaped point cloud with noise
# sigma, judging by the helper's name; defined in Flowfusion.
sampleX1(n_samples) = Flowfusion.random_literal_cat(n_samples, sigma = T(0.05))
n_samples = 400  # training batch size
| 33 | + |
#Loop over three different process settings (θ, variance at t=0, variance at t=1, decay):
for (θ, v_at_0, v_at_1, dec) in [(10f0, 5f0, 0.01f0, -2f0), (2f0, 2f0, 0.1f0, -2f0), (10f0, 2f0, 0.1f0, -2f0)]
    #The process:
    P = OUFlow(θ, v_at_0, v_at_1, dec)

    #Optimizer:
    eta = 0.001
    model = FModel(embeddim = 256, layers = 3, spacedim = 2)
    opt_state = Flux.setup(AdamW(eta = eta), model)

    iters = 4000
    for i in 1:iters
        #Set up a batch of training pairs, and t:
        X0 = ContinuousState(sampleX0(n_samples))
        X1 = ContinuousState(sampleX1(n_samples))
        # Cap t just below 1 — presumably to avoid the degenerate endpoint of
        # the bridge; TODO confirm against Flowfusion's bridge semantics.
        t = rand(T, n_samples) .* 0.999f0
        #Construct the bridge:
        Xt = bridge(P, X0, X1, t)
        #Gradient & update:
        l, g = Flux.withgradient(model) do m
            floss(P, m(t, Xt), X1, scalefloss(P, t))
        end
        Flux.update!(opt_state, model, g[1])
        (i % 10 == 0) && println("i: $i; Loss: $l")
    end

    # Inference: integrate the learned flow from fresh X0 samples over a fine
    # time grid, recording the whole trajectory in `paths`.
    n_inference_samples = 5000
    X0 = ContinuousState(sampleX0(n_inference_samples))
    paths = Tracker()
    samples = gen(P, X0, model, 0f0:0.005f0:1f0, tracker = paths)

    #Plotting: source points, a subset of trajectories, true vs generated targets.
    pl = scatter(X0.state[1,:], X0.state[2,:], msw = 0, ms = 1, color = "blue", alpha = 0.5, size = (400,400), legend = :topleft, label = "X0")
    tvec = stack_tracker(paths, :t)
    xttraj = stack_tracker(paths, :xt)
    for i in 1:50:1000
        plot!(xttraj[1,i,:], xttraj[2,i,:], color = "red", label = i==1 ? "Trajectory" : :none, alpha = 0.4)
    end
    X1true = sampleX1(n_inference_samples)
    scatter!(X1true[1,:], X1true[2,:], msw = 0, ms = 1, color = "orange", alpha = 0.5, label = "X1 (true)")
    scatter!(samples.state[1,:], samples.state[2,:], msw = 0, ms = 1, color = "green", alpha = 0.5, label = "X1 (generated)")
    display(pl)
    savefig("OU_continuous_cat_$P.svg")

    # First-coordinate trajectories over integration steps for the same subset.
    pl = plot()
    for i in 1:50:1000
        plot!(xttraj[1,i,:], color = "red", alpha = 0.4, label = :none)
    end
    # Was a bare `pl`, which is a no-op inside a `for` loop — the figure was
    # never shown. `display` matches the first plot above; savefig still saves
    # the current plot.
    display(pl)
    savefig("OU_continuous_traj_$P.svg")
end
0 commit comments