@@ -131,10 +131,10 @@ with pm.Model(coords=coords, check_bounds=False) as model:
131
131
rho = pm.Normal(name="rho", mu=0, sigma=0.2, dims=("lags",))
132
132
sigma = pm.HalfNormal(name="sigma", sigma=0.2)
133
133
134
- ar_init_obs = pm.MutableData (name="ar_init_obs", value=np.zeros(lags), dims=("lags",))
134
+ ar_init_obs = pm.Data (name="ar_init_obs", value=np.zeros(lags), dims=("lags",))
135
135
ar_init = pm.Normal(name="ar_init", observed=ar_init_obs, dims=("lags",))
136
136
137
- ar_innov_obs = pm.MutableData ("ar_innov_obs", np.zeros(trials - lags), dims=("steps",))
137
+ ar_innov_obs = pm.Data ("ar_innov_obs", np.zeros(trials - lags), dims=("steps",))
138
138
ar_innov = pm.CustomDist(
139
139
"ar_dist",
140
140
ar_init,
@@ -179,7 +179,7 @@ for i, hdi_prob in enumerate((0.94, 0.64), 1):
179
179
ax.plot(prior.prior["ar"].mean(("chain", "draw")), color="C0", label="Mean")
180
180
ax.legend(loc="upper right")
181
181
ax.set_xlabel("time")
182
- ax.set_title("AR(2) Prior Samples", fontsize=18, fontweight="bold")
182
+ ax.set_title("AR(2) Prior Samples", fontsize=18, fontweight="bold");
183
183
```
184
184
185
185
It is not surprising that the prior distribution is a stationary process around zero given that the prior of the `rho` parameter is far from one.
@@ -197,7 +197,7 @@ for i, axi in enumerate(ax, start=chosen_draw):
197
197
color="C0" if i == chosen_draw else "black",
198
198
)
199
199
axi.set_title(f"Sample {i}", fontsize=18, fontweight="bold")
200
- ax[-1].set_xlabel("time")
200
+ ax[-1].set_xlabel("time");
201
201
```
202
202
203
203
## Posterior
@@ -239,14 +239,14 @@ axes = az.plot_trace(
239
239
],
240
240
backend_kwargs={"figsize": (12, 5), "layout": "constrained"},
241
241
)
242
- plt.gcf().suptitle("AR(2) Model Trace", fontsize=18, fontweight="bold")
242
+ plt.gcf().suptitle("AR(2) Model Trace", fontsize=18, fontweight="bold");
243
243
```
244
244
245
245
``` {code-cell} ipython3
246
246
axes = az.plot_posterior(
247
247
trace, var_names=["rho", "sigma"], ref_val=[*rho_true, sigma_true], figsize=(15, 5)
248
248
)
249
- plt.gcf().suptitle("AR(2) Model Parameters Posterior", fontsize=18, fontweight="bold")
249
+ plt.gcf().suptitle("AR(2) Model Parameters Posterior", fontsize=18, fontweight="bold");
250
250
```
251
251
252
252
We see we have successfully recovered the true parameters of the model.
@@ -282,7 +282,7 @@ ax.plot(prior.prior["ar"].mean(("chain", "draw")), color="C0", label="Mean")
282
282
ax.plot(ar_obs, color="black", label="Observed")
283
283
ax.legend(loc="upper right")
284
284
ax.set_xlabel("time")
285
- ax.set_title("AR(2) Posterior Predictive Samples", fontsize=18, fontweight="bold")
285
+ ax.set_title("AR(2) Posterior Predictive Samples", fontsize=18, fontweight="bold");
286
286
```
287
287
288
288
Overall, the model is capturing the global dynamics of the time series. In order to have a better insight into the model, we can plot a subset of the posterior samples and compare them with the observed data.
@@ -299,7 +299,7 @@ for i, axi in enumerate(ax):
299
299
300
300
ax[-1].set_xlabel("time")
301
301
302
- fig.suptitle("AR(2) Posterior Predictive Samples", fontsize=18, fontweight="bold", y=1.05)
302
+ fig.suptitle("AR(2) Posterior Predictive Samples", fontsize=18, fontweight="bold", y=1.05);
303
303
```
304
304
305
305
## Authors
0 commit comments