@@ -9,26 +9,22 @@ theta = rand(Dirichlet(alpha), m)
 doc_lengths = rand(Poisson(1_000), m)
 n = sum(doc_lengths)
 
-w_lda = Vector{Int}(undef, n)
-doc_lda = Vector{Int}(undef, n)
+w = Vector{Int}(undef, n)
+doc = Vector{Int}(undef, n)
 for i in 1:m
-    # Because all the models exist in the same scope, we need
-    # to add some variable suffixes to avoid local/global
-    # scope warnings. This is quite ugly and should be solved
-    # properly, using e.g. modules or functions.
-    local idx_lda = sum(doc_lengths[1:i-1]) # starting index for inner loop
+    local idx = sum(doc_lengths[1:i-1]) # starting index for inner loop
     for j in 1:doc_lengths[i]
-        z_lda = rand(Categorical(theta[:, i]))
-        w_lda[idx_lda + j] = rand(Categorical(phi[:, z_lda]))
-        doc_lda[idx_lda + j] = i
+        z = rand(Categorical(theta[:, i]))
+        w[idx + j] = rand(Categorical(phi[:, z]))
+        doc[idx + j] = i
     end
 end
 
 @model function dppl_lda(k, m, w, doc, alpha, beta)
     theta ~ filldist(Dirichlet(alpha), m)
     phi ~ filldist(Dirichlet(beta), k)
     log_phi_dot_theta = log.(phi * theta)
-    DynamicPPL.@addlogprob! sum(log_phi_dot_theta[CartesianIndex.(w, doc)])
+    @addlogprob! sum(log_phi_dot_theta[CartesianIndex.(w, doc)])
 end
 
-model = dppl_lda(k, m, w_lda, doc_lda, alpha, beta)
+model = dppl_lda(k, m, w, doc, alpha, beta)
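The comment removed above notes that the suffix workaround should eventually be replaced by a module or function; a minimal sketch of the function-based variant, assuming Distributions is loaded for Categorical and that phi, theta, and doc_lengths are defined as in the earlier lines (the helper name simulate_lda is illustrative, not part of this commit):

using Distributions: Categorical

# Illustrative helper: generating the corpus inside a function keeps the loop
# variables local, so no `local` declarations or `_lda` suffixes are needed.
function simulate_lda(phi, theta, doc_lengths)
    n = sum(doc_lengths)
    w = Vector{Int}(undef, n)
    doc = Vector{Int}(undef, n)
    idx = 0
    for i in eachindex(doc_lengths)
        for j in 1:doc_lengths[i]
            z = rand(Categorical(theta[:, i]))         # topic for this token
            w[idx + j] = rand(Categorical(phi[:, z]))  # word drawn from topic z
            doc[idx + j] = i                           # document index of token
        end
        idx += doc_lengths[i]
    end
    return w, doc
end

w, doc = simulate_lda(phi, theta, doc_lengths)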