Skip to content

Commit c9a8c91

Browse files
author
Bob Carpenter
committed
manual updates: separate out time series, add moving average, add standardization for linear regression, with examples
1 parent 9cab4c1 commit c9a8c91

File tree

5 files changed

+129
-0
lines changed
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
data {
2+
int<lower=0> N;
3+
vector[N] y;
4+
vector[N] x;
5+
}
6+
parameters {
7+
real alpha;
8+
real beta;
9+
real<lower=0> sigma;
10+
}
11+
model {
12+
alpha ~ normal(0,10);
13+
beta ~ normal(0,10);
14+
sigma ~ cauchy(0,5);
15+
for (n in 1:N)
16+
y[n] ~ normal(alpha + beta * x[n], sigma);
17+
}
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
// Linear regression fit on standardized (z-scored) data for better
// posterior geometry; generated quantities recover the coefficients
// on the original scale.
data {
  int<lower=0> N;        // number of observations
  vector[N] y;           // outcomes
  vector[N] x;           // predictors
}
transformed data {
  // z-score both variables
  vector[N] x_std;
  vector[N] y_std;
  x_std <- (x - mean(x)) / sd(x);
  y_std <- (y - mean(y)) / sd(y);
}
parameters {
  real alpha_std;            // intercept on the standardized scale
  real beta_std;             // slope on the standardized scale
  real<lower=0> sigma_std;   // residual scale on the standardized scale
}
model {
  alpha_std ~ normal(0, 10);
  beta_std ~ normal(0, 10);
  sigma_std ~ cauchy(0, 5);
  y_std ~ normal(alpha_std + beta_std * x_std, sigma_std);
}
generated quantities {
  // Back-transform to the original scale.  Expanding
  //   y = mean(y) + sd(y) * (alpha_std + beta_std * (x - mean(x)) / sd(x))
  // gives
  //   alpha = sd(y) * (alpha_std - beta_std * mean(x) / sd(x)) + mean(y)
  //   beta  = beta_std * sd(y) / sd(x)
  //   sigma = sd(y) * sigma_std
  // BUG FIX: the original used `+ beta_std * mean(x) / sd(x)`, which
  // mis-recovers the intercept whenever mean(x) != 0.
  real alpha;
  real beta;
  real<lower=0> sigma;
  alpha <- sd(y) * (alpha_std - beta_std * mean(x) / sd(x)) + mean(y);
  beta <- beta_std * sd(y) / sd(x);
  sigma <- sd(y) * sigma_std;
}

misc/moving-avg/ma2-sim.R

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
# Simulate T observations from an MA(2) process:
#   y[t] = mu + epsilon[t] + theta[1] * epsilon[t-1] + theta[2] * epsilon[t-2]
# with epsilon[t] ~ normal(0, sigma), then (optionally) fit ma2.stan.
# NOTE: the original assigned `T <- 1000` twice (paste error); duplicate removed.

mu <- -1.25
sigma <- 0.75
theta <- c(0.7, 0.3)
T <- 1000  # number of observations; matches the Stan model's data name
           # (shadows the TRUE shorthand T -- use TRUE explicitly below if needed)

y <- rep(0, T)
epsilon <- rep(0, T)

# t = 1: no lagged errors available yet
predictor <- mu
y[1] <- rnorm(1, predictor, sigma)
epsilon[1] <- y[1] - predictor

# t = 2: only one lagged error available
predictor <- mu + theta[1] * epsilon[1]
y[2] <- rnorm(1, predictor, sigma)
epsilon[2] <- y[2] - predictor

# t >= 3: full MA(2) predictor
for (t in 3:T) {
  predictor <- mu + theta[1] * epsilon[t - 1] + theta[2] * epsilon[t - 2]
  y[t] <- rnorm(1, predictor, sigma)
  epsilon[t] <- y[t] - predictor
}

library("rstan")
# fit <- stan("ma2.stan", data = list(T = T, y = y), iter = 500, chains = 2, init = 0)

misc/moving-avg/ma2.stan

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
// MA(2) time-series model.  The first two errors are reconstructed
// exactly in transformed parameters; the likelihood conditions on the
// first two observations (only t = 3..T contribute).
data {
  int<lower=3> T;        // number of observations
  vector[T] y;           // observation at time t
}
parameters {
  real mu;               // mean
  real<lower=0> sigma;   // error scale
  vector[2] theta;       // lag coefficients
}
transformed parameters {
  vector[T] epsilon;     // error terms, reconstructed recursively
  epsilon[1] <- y[1] - mu;
  epsilon[2] <- y[2] - mu - theta[1] * epsilon[1];
  for (t in 3:T)
    epsilon[t] <- ( y[t] - mu
                    - theta[1] * epsilon[t - 1]
                    - theta[2] * epsilon[t - 2] );
}
model {
  mu ~ cauchy(0, 2.5);
  theta ~ cauchy(0, 2.5);
  sigma ~ cauchy(0, 2.5);
  // Vectorized likelihood over t = 3..T, identical to the loop
  //   for (t in 3:T)
  //     y[t] ~ normal(mu + theta[1]*epsilon[t-1] + theta[2]*epsilon[t-2], sigma);
  // segment(v, i, n) is v[i], ..., v[i + n - 1].
  segment(y, 3, T - 2) ~ normal(mu
                                + theta[1] * segment(epsilon, 2, T - 2)
                                + theta[2] * segment(epsilon, 1, T - 2),
                                sigma);
}

misc/moving-avg/maQ.stan

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
// MA(Q) model: generalizes ma2.stan to Q lagged error terms.  Early
// time points use only as many lagged errors as exist (min(t-1, Q)).
data {
  int<lower=0> Q;        // num previous noise terms
  int<lower=3> T;        // num observations
  vector[T] y;           // observation at time t
}
parameters {
  real mu;               // mean
  real<lower=0> sigma;   // error scale
  // BUG FIX: was vector[2], which indexes theta[q] out of bounds for
  // Q > 2 and over-parameterizes for Q < 2; the lag-coefficient vector
  // must be sized by Q.
  vector[Q] theta;       // error coeff for lags 1..Q
}
transformed parameters {
  vector[T] epsilon;     // error term at time t
  for (t in 1:T) {
    epsilon[t] <- y[t] - mu;
    // subtract contributions of the available lagged errors
    for (q in 1:min(t - 1, Q))
      epsilon[t] <- epsilon[t] - theta[q] * epsilon[t - q];
  }
}
model {
  vector[T] eta;         // conditional mean of y[t]
  mu ~ cauchy(0, 2.5);
  theta ~ cauchy(0, 2.5);
  sigma ~ cauchy(0, 2.5);
  for (t in 1:T) {
    eta[t] <- mu;
    for (q in 1:min(t - 1, Q))
      eta[t] <- eta[t] + theta[q] * epsilon[t - q];
  }
  y ~ normal(eta, sigma);
}

0 commit comments

Comments
 (0)