1 change: 0 additions & 1 deletion Project.toml
@@ -9,7 +9,6 @@ DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
-LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
2 changes: 1 addition & 1 deletion models/dppl_gauss_unknown.jl
@@ -6,7 +6,7 @@ y = randn() .+ s * randn(n)
N = length(y)
m ~ Normal(0, 1)
s ~ truncated(Cauchy(0, 5); lower=0)
-y ~ filldist(Normal(m, s), N)
+y ~ product_distribution(fill(Normal(m, s), N))
end

model = dppl_gauss_unknown(y)
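
Note: the change above is behaviour-preserving. product_distribution(fill(dist, N)) is the i.i.d. product whose log-density is the sum of the component log-densities, which is what filldist(dist, N) computed. A minimal sketch of the check, assuming only Distributions.jl is loaded (values are illustrative):

using Distributions
m, s, N = 0.5, 1.2, 4                      # arbitrary illustrative parameters
y = randn(N)
d = product_distribution(fill(Normal(m, s), N))
logpdf(d, y) ≈ sum(logpdf(Normal(m, s), yi) for yi in y)   # true
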
7 changes: 2 additions & 5 deletions models/dppl_hier_poisson.jl
@@ -1,4 +1,3 @@
-using LazyArrays
using Turing: LogPoisson

nd, ns = 5, 10
@@ -13,15 +12,13 @@ y = mapreduce(λi -> rand(Poisson(λi), nd), vcat, λ)
x = repeat(logpop, inner=nd)
idx = repeat(collect(1:ns), inner=nd)

-lazyarray(f, x) = LazyArray(Base.broadcasted(f, x))

@model function dppl_hier_poisson(y, x, idx, ns)
a0 ~ Normal(0, 10)
a1 ~ Normal(0, 1)
a0_sig ~ truncated(Cauchy(0, 1); lower=0)
-a0s ~ filldist(Normal(0, a0_sig), ns)
+a0s ~ product_distribution(fill(Normal(0, a0_sig), ns))
alpha = a0 .+ a0s[idx] .+ a1 * x
-y ~ arraydist(lazyarray(LogPoisson, alpha))
+y ~ product_distribution(LogPoisson.(alpha))
end

model = dppl_hier_poisson(y, x, idx, ns)
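
Note: arraydist over a LazyArray built the elementwise product without materialising the vector of component distributions; product_distribution(LogPoisson.(alpha)) expresses the same product, just materialising the components eagerly. A small sketch, assuming Distributions.jl and using Poisson(exp(alpha)) as a stand-in for Turing's LogPoisson (which, as far as I understand, is a Poisson parameterised by the log-rate):

using Distributions
alpha = [0.1, -0.3, 0.7]                   # illustrative log-rates
y = [1, 0, 2]
d = product_distribution(Poisson.(exp.(alpha)))
logpdf(d, y) ≈ sum(logpdf.(Poisson.(exp.(alpha)), y))   # true
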
2 changes: 1 addition & 1 deletion models/dppl_high_dim_gauss.jl
@@ -1,5 +1,5 @@
@model function dppl_high_dim_gauss(D)
-m ~ filldist(Normal(0, 1), D)
+m ~ product_distribution(fill(Normal(0, 1), D))
end

model = dppl_high_dim_gauss(10_000)
4 changes: 2 additions & 2 deletions models/dppl_hmm_semisup.jl
@@ -28,8 +28,8 @@ for t in 2:T_unsup
end

@model function dppl_hmm_semisup(K, T, T_unsup, w, z, u, alpha, beta)
-theta ~ filldist(Dirichlet(alpha), K)
-phi ~ filldist(Dirichlet(beta), K)
+theta ~ product_distribution(fill(Dirichlet(alpha), K))
+phi ~ product_distribution(fill(Dirichlet(beta), K))
for t in 1:T
w[t] ~ Categorical(phi[:, z[t]]);
end
4 changes: 2 additions & 2 deletions models/dppl_lda.jl
@@ -21,8 +21,8 @@ for i in 1:m
end

@model function dppl_lda(k, m, w, doc, alpha, beta)
-theta ~ filldist(Dirichlet(alpha), m)
-phi ~ filldist(Dirichlet(beta), k)
+theta ~ product_distribution(fill(Dirichlet(alpha), m))
+phi ~ product_distribution(fill(Dirichlet(beta), k))
log_phi_dot_theta = log.(phi * theta)
@addlogprob! sum(log_phi_dot_theta[CartesianIndex.(w, doc)])
end
14 changes: 6 additions & 8 deletions models/dppl_logistic_regression.jl
@@ -1,21 +1,19 @@
using StatsFuns: logistic
-using LazyArrays

+function safelogistic(x::T) where {T}
+logistic(x) * (1 - 2 * eps(T)) + eps(T)
+end

d, n = 100, 10_000
X = randn(d, n)
w = randn(d)
y = Int.(logistic.(X' * w) .> 0.5)

-function safelogistic(x::T) where {T}
-logistic(x) * (1 - 2 * eps(T)) + eps(T)
-end

-lazyarray(f, x) = LazyArray(Base.broadcasted(f, x))

@model function dppl_logistic_regression(Xt, y)
N, D = size(Xt)
-w ~ filldist(Normal(), D)
-y ~ arraydist(lazyarray(x -> Bernoulli(safelogistic(x)), Xt * w))
+w ~ product_distribution(Normal.(zeros(D)))
+y ~ product_distribution(Bernoulli.(safelogistic.(Xt * w)))
end

model = dppl_logistic_regression(X', y)
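
Note: Normal.(zeros(D)) builds D standard normals (the scale defaults to 1), so the new prior on w is the same D-dimensional i.i.d. standard normal that filldist(Normal(), D) described; fill(Normal(), D) would be an equivalent spelling. A quick check, assuming Distributions.jl:

using Distributions
D = 3
Normal.(zeros(D)) == fill(Normal(), D)   # true: Normal(0.0) defaults to scale 1
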
6 changes: 3 additions & 3 deletions models/dppl_naive_bayes.jl
@@ -16,12 +16,12 @@ image = transform(pca, image_raw)
# Take only the first 1000 images and vectorise
N = 1000
image_subset = image[:, 1:N]'
-image_vec = vec(image_subset[:, :])
+image_vec = image_subset[:, :]
labels = labels[1:N]

@model function dppl_naive_bayes(image_vec, labels, C, D)
-m ~ filldist(Normal(0, 10), C, D)
-image_vec ~ MvNormal(vec(m[labels, :]), I)
+m ~ product_distribution(fill(Normal(0, 10), C, D))
+image_vec ~ product_distribution(Normal.(m[labels, :]))
end

model = dppl_naive_bayes(image_vec, labels, C, D)
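
Note: image_vec now stays a matrix, and product_distribution over a matrix of univariate normals gives the same total log-density as the old MvNormal(vec(mean), I) over the flattened data. A sketch of the equivalence, assuming a recent Distributions.jl in which product_distribution accepts arrays of any dimension (sizes here are illustrative, not the MNIST dimensions used above):

using Distributions, LinearAlgebra
mu = randn(4, 3)                            # matrix of per-entry means
x = randn(4, 3)
d = product_distribution(Normal.(mu))       # product of unit-variance normals
logpdf(d, x) ≈ logpdf(MvNormal(vec(mu), I), vec(x))   # true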