diff --git a/Project.toml b/Project.toml
index a9f9964..39f8ef3 100644
--- a/Project.toml
+++ b/Project.toml
@@ -9,7 +9,6 @@ DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
 Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
-LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
 MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
diff --git a/models/dppl_gauss_unknown.jl b/models/dppl_gauss_unknown.jl
index b70bacd..015548d 100644
--- a/models/dppl_gauss_unknown.jl
+++ b/models/dppl_gauss_unknown.jl
@@ -6,7 +6,7 @@ y = randn() .+ s * randn(n)
     N = length(y)
     m ~ Normal(0, 1)
     s ~ truncated(Cauchy(0, 5); lower=0)
-    y ~ filldist(Normal(m, s), N)
+    y ~ product_distribution(fill(Normal(m, s), N))
 end

 model = dppl_gauss_unknown(y)
diff --git a/models/dppl_hier_poisson.jl b/models/dppl_hier_poisson.jl
index 6850353..d69dece 100644
--- a/models/dppl_hier_poisson.jl
+++ b/models/dppl_hier_poisson.jl
@@ -1,4 +1,3 @@
-using LazyArrays
 using Turing: LogPoisson

 nd, ns = 5, 10
@@ -13,15 +12,13 @@ y = mapreduce(λi -> rand(Poisson(λi), nd), vcat, λ)
 x = repeat(logpop, inner=nd)
 idx = repeat(collect(1:ns), inner=nd)

-lazyarray(f, x) = LazyArray(Base.broadcasted(f, x))
-
 @model function dppl_hier_poisson(y, x, idx, ns)
     a0 ~ Normal(0, 10)
     a1 ~ Normal(0, 1)
     a0_sig ~ truncated(Cauchy(0, 1); lower=0)
-    a0s ~ filldist(Normal(0, a0_sig), ns)
+    a0s ~ product_distribution(fill(Normal(0, a0_sig), ns))
     alpha = a0 .+ a0s[idx] .+ a1 * x
-    y ~ arraydist(lazyarray(LogPoisson, alpha))
+    y ~ product_distribution(LogPoisson.(alpha))
 end

 model = dppl_hier_poisson(y, x, idx, ns)
diff --git a/models/dppl_high_dim_gauss.jl b/models/dppl_high_dim_gauss.jl
index 58d09ef..395cba4 100644
--- a/models/dppl_high_dim_gauss.jl
+++ b/models/dppl_high_dim_gauss.jl
@@ -1,5 +1,5 @@
 @model function dppl_high_dim_gauss(D)
-    m ~ filldist(Normal(0, 1), D)
+    m ~ product_distribution(fill(Normal(0, 1), D))
 end

 model = dppl_high_dim_gauss(10_000)
diff --git a/models/dppl_hmm_semisup.jl b/models/dppl_hmm_semisup.jl
index a165e94..17dbda5 100644
--- a/models/dppl_hmm_semisup.jl
+++ b/models/dppl_hmm_semisup.jl
@@ -28,8 +28,8 @@ for t in 2:T_unsup
 end

 @model function dppl_hmm_semisup(K, T, T_unsup, w, z, u, alpha, beta)
-    theta ~ filldist(Dirichlet(alpha), K)
-    phi ~ filldist(Dirichlet(beta), K)
+    theta ~ product_distribution(fill(Dirichlet(alpha), K))
+    phi ~ product_distribution(fill(Dirichlet(beta), K))
     for t in 1:T
         w[t] ~ Categorical(phi[:, z[t]]);
     end
diff --git a/models/dppl_lda.jl b/models/dppl_lda.jl
index b807d38..4522eb2 100644
--- a/models/dppl_lda.jl
+++ b/models/dppl_lda.jl
@@ -21,8 +21,8 @@ for i in 1:m
 end

 @model function dppl_lda(k, m, w, doc, alpha, beta)
-    theta ~ filldist(Dirichlet(alpha), m)
-    phi ~ filldist(Dirichlet(beta), k)
+    theta ~ product_distribution(fill(Dirichlet(alpha), m))
+    phi ~ product_distribution(fill(Dirichlet(beta), k))
     log_phi_dot_theta = log.(phi * theta)
     @addlogprob! sum(log_phi_dot_theta[CartesianIndex.(w, doc)])
 end
diff --git a/models/dppl_logistic_regression.jl b/models/dppl_logistic_regression.jl
index 3b21928..9081286 100644
--- a/models/dppl_logistic_regression.jl
+++ b/models/dppl_logistic_regression.jl
@@ -1,21 +1,19 @@
 using StatsFuns: logistic
-using LazyArrays
+
+function safelogistic(x::T) where {T}
+    logistic(x) * (1 - 2 * eps(T)) + eps(T)
+end

 d, n = 100, 10_000
 X = randn(d, n)
 w = randn(d)
 y = Int.(logistic.(X' * w) .> 0.5)

-function safelogistic(x::T) where {T}
-    logistic(x) * (1 - 2 * eps(T)) + eps(T)
-end
-
-lazyarray(f, x) = LazyArray(Base.broadcasted(f, x))

 @model function dppl_logistic_regression(Xt, y)
     N, D = size(Xt)
-    w ~ filldist(Normal(), D)
-    y ~ arraydist(lazyarray(x -> Bernoulli(safelogistic(x)), Xt * w))
+    w ~ product_distribution(Normal.(zeros(D)))
+    y ~ product_distribution(Bernoulli.(safelogistic.(Xt * w)))
 end

 model = dppl_logistic_regression(X', y)
diff --git a/models/dppl_naive_bayes.jl b/models/dppl_naive_bayes.jl
index 2bc1cfa..498ba90 100644
--- a/models/dppl_naive_bayes.jl
+++ b/models/dppl_naive_bayes.jl
@@ -16,12 +16,12 @@ image = transform(pca, image_raw)
 # Take only the first 1000 images and vectorise
 N = 1000
 image_subset = image[:, 1:N]'
-image_vec = vec(image_subset[:, :])
+image_vec = image_subset[:, :]
 labels = labels[1:N]

 @model function dppl_naive_bayes(image_vec, labels, C, D)
-    m ~ filldist(Normal(0, 10), C, D)
-    image_vec ~ MvNormal(vec(m[labels, :]), I)
+    m ~ product_distribution(fill(Normal(0, 10), C, D))
+    image_vec ~ product_distribution(Normal.(m[labels, :]))
 end

 model = dppl_naive_bayes(image_vec, labels, C, D)
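Note (not part of the diff): the rewrites above all follow the same two correspondences, sketched below assuming only Distributions.jl is loaded. filldist(d, N) corresponds to product_distribution(fill(d, N)), and the arraydist(lazyarray(f, x)) pattern corresponds to broadcasting the constructor and wrapping the result, product_distribution(f.(x)). The names d_iid and d_bern are illustrative only and do not appear in the changed files.

using Distributions

N = 3
m, s = 0.0, 1.0

# iid product over N copies of Normal(m, s), i.e. the replacement used for
# filldist(Normal(m, s), N) throughout the diff
d_iid = product_distribution(fill(Normal(m, s), N))

# element-wise product over distinct distributions, i.e. the replacement
# used for the arraydist(lazyarray(...)) patterns
p = 1 ./ (1 .+ exp.(-randn(N)))   # probabilities in (0, 1)
d_bern = product_distribution(Bernoulli.(p))

logpdf(d_iid, zeros(N))           # one joint log-density for the length-N vector
logpdf(d_bern, [1, 0, 1])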