
Commit 46ee487

Author: Bob Carpenter

Commit message: models updated, doc updated, semicolon added to end of increment_log_prob function

Parent: 2afe4cc

File tree: 25 files changed (+198 −215 lines)

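The diffs below replace direct manipulation of the log density accumulator lp__ with calls to increment_log_prob(). As a minimal sketch of the pattern (the variable names y, mu, sigma here are illustrative only, not taken from any of the changed files), a model block that used to read

  model {
    lp__ <- lp__ + normal_log(y, mu, sigma);
  }

now reads

  model {
    increment_log_prob(normal_log(y, mu, sigma));
  }

Both forms add the given term to the accumulated log probability; the function call simply avoids assigning to lp__ directly.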

basic_distributions/normal_mixture.stan

Lines changed: 4 additions & 2 deletions
@@ -11,6 +11,8 @@ parameters {
   real y;
 }
 model {
-  lp__ <- lp__ + log_sum_exp(log(theta) + normal_log(y,mu[1],sigma[1]),
-                             log(1.0 - theta) + normal_log(y,mu[2],sigma[2]));
+  increment_log_prob(log_sum_exp(log(theta)
+                                 + normal_log(y,mu[1],sigma[1]),
+                                 log(1.0 - theta)
+                                 + normal_log(y,mu[2],sigma[2])));
 }

basic_distributions/triangle.stan

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 parameters {
-  real<lower=-1,upper=1> y;
+  real<lower=-1,upper=1> y;
 }
 model {
-  lp__ <- lp__ + log1m(fabs(y));
+  increment_log_prob(log1m(fabs(y)));
 }

basic_distributions/wishart2x2.stan

Lines changed: 11 additions & 5 deletions
@@ -1,9 +1,13 @@
 // Sample from 2 x 2 Wishart
 // calculate matrix directly through non-matrix parameters

-// WARNING:
-// This simple parameterization only works for 2 x 2
-// matrices because positive definiteness is simple.
+// This is only an example of how to compute transforms and Jacobians;
+// the built-in types cov_matrix and corr_matrix (or their Cholesky
+// factor versions) should be used for covariance and correlation
+// matrices
+
+// WARNING: This simple parameterization only works for 2 x 2 matrices
+// because positive definiteness is simple.

 transformed data {
   cov_matrix[2] S;
@@ -34,7 +38,7 @@ transformed parameters {
   W[2,1] <- cov;
 }
 model {
-  // apply log Jacobian determinant of transform
+  // apply log Jacobian determinant of transform:
   //   (sd1,sd2,x) -> (W[1,1],W[2,2],W[1,2])
   //       | d W[1,1] / d sd1   d W[1,1] / d sd2   d W[1,1] / d x |
   //   J = | d W[2,2] / d sd1   d W[2,2] / d sd2   d W[2,2] / d x |
@@ -44,7 +48,9 @@ model {
   //     = | 0           2 * sd2     0                       |
   //       | rho * sd2   rho * sd1   sd1 * sd2 * (1 - rho^2) |

-  lp__ <- lp__ + log(2.0 * sd1) + log(2.0 * sd2) + log(sd1 * sd2 * (1.0 - rho * rho));
+  increment_log_prob(log(2.0 * sd1)
+                     + log(2.0 * sd2)
+                     + log(sd1 * sd2 * (1.0 - rho * rho)));

   W ~ wishart(4, S);
 }
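The revised header comment recommends the built-in constrained matrix types over this manual construction. As a minimal sketch of that recommendation (not part of this commit, and reusing the S defined in the file's transformed data block), the covariance matrix could instead be declared directly:

  parameters {
    cov_matrix[2] W;   // Stan enforces positive definiteness and applies the Jacobian
  }
  model {
    W ~ wishart(4, S);
  }

With the built-in type there is no hand-written transform or log Jacobian term to maintain.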

basic_estimators/normal_censored.stan

Lines changed: 2 additions & 1 deletion
@@ -10,7 +10,8 @@ parameters {
 model {
   for (n in 1:N_observed)
     y[n] ~ normal(mu,1.0) T[,U];
-  lp__ <- lp__ + N_censored * log1m(normal_cdf(U,mu,1.0));
+  increment_log_prob(N_censored
+                     * log1m(normal_cdf(U,mu,1.0)));
 }
basic_estimators/normal_mixture.stan

Lines changed: 5 additions & 4 deletions
@@ -20,8 +20,9 @@ model {
   theta ~ uniform(0,1);  // equivalently, ~ beta(1,1);
   for (k in 1:2)
     mu[k] ~ normal(0,10);
-  for (n in 1:N) {
-    lp__ <- lp__ + log_sum_exp(log_theta + normal_log(y[n],mu[1],1.0),
-                               log_one_minus_theta + normal_log(y[n],mu[2],1.0));
-  }
+  for (n in 1:N)
+    increment_log_prob(log_sum_exp(log_theta
+                                   + normal_log(y[n],mu[1],1.0),
+                                   log_one_minus_theta
+                                   + normal_log(y[n],mu[2],1.0)));
 }
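In the per-observation loop, the incremented quantity is the marginal mixture log likelihood. Assuming, as the names suggest, that log_theta and log_one_minus_theta hold log(theta) and log1m(theta), the identity being used is

  log_sum_exp(log_theta + normal_log(y[n],mu[1],1.0),
              log_one_minus_theta + normal_log(y[n],mu[2],1.0))
    = log( theta * Normal(y[n] | mu[1], 1) + (1 - theta) * Normal(y[n] | mu[2], 1) )

so the discrete mixture indicator is summed out rather than sampled.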

basic_estimators/normal_mixture_k.stan

Lines changed: 1 addition & 1 deletion
@@ -15,6 +15,6 @@ model {
     for (k in 1:K)
       ps[k] <- log(theta[k])
                + normal_log(y[n],mu[k],sigma[k]);
-    lp__ <- lp__ + log_sum_exp(ps);
+    increment_log_prob(log_sum_exp(ps));
   }
 }

basic_estimators/normal_mixture_k_prop.stan

Lines changed: 1 addition & 3 deletions
@@ -16,8 +16,6 @@ transformed parameters {
 }
 model {
   // prior
-  // theta ~ dirichlet(rep_vector(1,K));
-  // mu_prop ~ dirichlet(rep_vector(1,K));
   mu_loc ~ cauchy(0,5);
   mu_scale ~ cauchy(0,5);
   sigma ~ cauchy(0,5);
@@ -33,7 +31,7 @@ model {
        ps[k] <- log_theta[k]
                 + normal_log(y[n],mu[k],sigma[k]);
      }
-     lp__ <- lp__ + log_sum_exp(ps);
+     increment_log_prob(log_sum_exp(ps));
   }
  }
 }

bugs_examples/vol1/bones/bones.stan

Lines changed: 31 additions & 36 deletions
@@ -1,59 +1,54 @@
-# Bones: latent trait model for multiple ordered
-# categorical responses
-## http://www.openbugs.info/Examples/Bones.html
-
-
-## Note:
-## 1. Since it is just the response that is
-## modelled as categorical distribution,
-## we should be able to run the model now except handling
-## the missing. However, the data structure is a bit
-## difficult to deal with (Allowing some redundancy
-## in the transformed parameters (Q here), the model
-## is fine in Stan.
-## 2. The missing data is recoded as `-1`, which is
-## not modeled for `gamma` as in the OpenBUGS example
-## and not modeled for `grade`.
-
+/**
+ * Bones: latent trait model for multiple ordered
+ * categorical responses
+ * http://www.openbugs.info/Examples/Bones.html
+ *
+ *
+ * Note:
+ * 1. Since it is just the response that is
+ * modelled as categorical distribution,
+ * we should be able to run the model now except handling
+ * the missing. However, the data structure is a bit
+ * difficult to deal with (Allowing some redundancy
+ * in the transformed parameters (Q here), the model
+ * is fine in Stan.
+ * 2. The missing data is recoded as `-1`, which is
+ * not modeled for `gamma` as in the OpenBUGS example
+ * and not modeled for `grade`.
+ */

 data {
   int<lower=0> nChild;
   int<lower=0> nInd;
-  real gamma[nInd, 4];  // -1 indicates NA in original R dump data (bones.data.R.0)
+  real gamma[nInd, 4];  // -1 if missing
   real delta[nInd];
   int<lower=0> ncat[nInd];
-  int grade[nChild, nInd];  // -1 indicates NA in original R dump data (bones.data.R.0)
+  int grade[nChild, nInd];  // -1 if missing
 }
-
-
 parameters {
   real theta[nChild];
 }
-
 model {
   real p[nChild, nInd, 5];
   real Q[nChild, nInd, 4];
   theta ~ normal(0.0, 36);
-  for(i in 1:nChild) {
-    # Probability of observing grade k given theta
+  for (i in 1:nChild) {
+    // Probability of observing grade k given theta
     for (j in 1:nInd) {
-      # Cumulative probability of > grade k given theta
-      for (k in 1:(ncat[j] - 1)) {
+      // Cumulative probability of > grade k given theta
+      for (k in 1:(ncat[j] - 1))
        Q[i, j, k] <- inv_logit(delta[j] * (theta[i] - gamma[j, k]));
-      }
-
      p[i, j, 1] <- 1 - Q[i, j, 1];
      for (k in 2:(ncat[j] - 1))
        p[i, j, k] <- Q[i, j, k - 1] - Q[i, j, k];
      p[i, j, ncat[j]] <- Q[i, j, ncat[j] - 1];
-      // grade[i, j] - 1 ~ categorical(p[i, j, 1:ncat[j]])
-
-      // We use lp__ instead, since grade[i, j] has categorical distribution
-      // with varying dimension.
-      // If grade[i, j] = -1, it is missing, no contribution for lp__ then.
-      if (grade[i, j] != -1) {
-        lp__ <- lp__ + log(p[i, j, grade[i, j]]);
-      }
+
+      // increment log probability directly because grade[i, j]
+      // has categorical distribution with varying dimension.
+      // for missing grade[i, j] = -1, there is no log prob
+      // contribution
+      if (grade[i, j] != -1)
+        increment_log_prob(log(p[i, j, grade[i, j]]));
    }
  }
 }

bugs_examples/vol1/leuk/leuk.stan

Lines changed: 9 additions & 9 deletions
@@ -1,6 +1,9 @@
-# Leuk: Cox regression
-# URL of OpenBugs' implementation:
-# http://www.openbugs.info/Examples/Leuk.html
+/*
+ * Leuk: Cox regression
+ * URL of OpenBugs' implementation:
+ * http://www.openbugs.info/Examples/Leuk.html
+ */
+
 data {
   int<lower=0> N;
   int<lower=0> NT;
@@ -9,7 +12,6 @@ data {
   int<lower=0> fail[N];
   real Z[N];
 }
-
 transformed data {
   int Y[N, NT];
   int dN[N, NT];
@@ -24,22 +26,20 @@ transformed data {
   c <- 0.001;
   r <- 0.1;
 }
-
 parameters {
   real beta;
   real<lower=0> dL0[NT];
 }
-
 model {
   beta ~ normal(0, 1000);
   for(j in 1:NT) {
     dL0[j] ~ gamma(r * (t[j + 1] - t[j]) * c, c);
     for(i in 1:N) {
-      if (Y[i, j] != 0) lp__ <- lp__ + poisson_log(dN[i, j], Y[i, j] * exp(beta * Z[i]) * dL0[j]);
+      if (Y[i, j] != 0)
+        increment_log_prob(poisson_log(dN[i, j], Y[i, j] * exp(beta * Z[i]) * dL0[j]));
     }
   }
 }
-
 generated quantities {
   real S_placebo[NT];
   real S_treat[NT];
@@ -51,7 +51,7 @@ generated quantities {
     for (i in 1:j)
       s <- s + dL0[i];
     S_treat[j] <- pow(exp(-s), exp(beta * -0.5));
-    S_placebo[j] <- pow(exp(-s), exp(beta * 0.5));
+    S_placebo[j] <- pow(exp(-s), exp(beta * 0.5));
   }

 }
Lines changed: 17 additions & 20 deletions
@@ -1,16 +1,15 @@
-# BUGS example vol 1: LeukFr
-# http://mathstat.helsinki.fi/openbugs/Examples/Leukfr.html
-# http://www.openbugs.info/Examples/Leukfr.html
-
-# The result for sigma is a bit different from those in the
-# webpage.
-
-# But the result for beta on
-# http://www.mrc-bsu.cam.ac.uk/bugs/winbugs/Vol1.pdf
-# might not be correct.
-
-
-
+/*
+ * BUGS example vol 1: LeukFr
+ * http://mathstat.helsinki.fi/openbugs/Examples/Leukfr.html
+ * http://www.openbugs.info/Examples/Leukfr.html
+ *
+ * The result for sigma is a bit different from those in the
+ * webpage.
+
+ * But the result for beta on
+ * http://www.mrc-bsu.cam.ac.uk/bugs/winbugs/Vol1.pdf
+ * might not be correct.
+ */
 data {
   int<lower=0> N;
   int<lower=0> NT;
@@ -21,7 +20,6 @@ data {
   int<lower=0> pair[N];
   real Z[N];
 }
-
 transformed data {
   int Y[N, NT];
   int dN[N, NT];
@@ -37,28 +35,27 @@ transformed data {
   c <- 0.001;
   r <- 0.1;
 }
-
 parameters {
   real beta;
   real<lower=0> tau;
   real<lower=0> dL0[NT];
   real b[Npair];
 }
-
 transformed parameters {
   real<lower=0> sigma;
   sigma <- 1 / sqrt(tau);
 }
-
 model {
   beta ~ normal(0, 1000);
   tau ~ gamma(.001, .001);
   b ~ normal(0, sigma);
   for(j in 1:NT) {
     dL0[j] ~ gamma(r * (t[j + 1] - t[j]) * c, c);
-    for(i in 1:N) {
-      // lp__ <- lp__ + if_else(Y[i, j], poisson_log(dN[i, j], Y[i, j] * exp(beta * Z[i] + b[pair[i]]) * dL0[j]), 0);
-      if (Y[i, j] != 0) lp__ <- lp__ + poisson_log(dN[i, j], Y[i, j] * exp(beta * Z[i] + b[pair[i]]) * dL0[j]);
+    for (i in 1:N) {
+      if (Y[i, j] != 0)
+        increment_log_prob(poisson_log(dN[i, j], Y[i, j]
+                                       * exp(beta * Z[i] + b[pair[i]])
+                                       * dL0[j]));
     }
   }
 }
