@@ -5,9 +5,9 @@ jupytext:
     format_name: myst
     format_version: 0.13
 kernelspec:
-  display_name: pymc-examples
+  display_name: pymc
   language: python
-  name: pymc-examples
+  name: python3
 ---
 
 (hsgp-advanced)=
@@ -44,6 +44,7 @@ A secondary goal of this implementation is flexibility via an accessible impleme
 import arviz as az
 import matplotlib.pyplot as plt
 import numpy as np
+import preliz as pz
 import pymc as pm
 import pytensor.tensor as pt
 ```
@@ -140,7 +141,9 @@ cov_mu = cov_trend + cov_short
 # Define the delta GPs
 n_gps = 10
 eta_delta_true = 3
-ell_delta_true = pm.draw(pm.Lognormal.dist(mu=np.log(ell_mu_short_true), sigma=0.5), draws=n_gps)
+ell_delta_true = pm.draw(
+    pm.Lognormal.dist(mu=np.log(ell_mu_short_true), sigma=0.5), draws=n_gps, random_seed=rng
+)
 
 cov_deltas = [
     eta_delta_true**2 * pm.gp.cov.Matern52(input_dim=1, ls=ell_i) for ell_i in ell_delta_true
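
The `rng` passed to `random_seed` throughout this diff is the NumPy `Generator` the notebook creates earlier, outside these hunks. A minimal sketch of the pattern, with a hypothetical seed value:

```python
import numpy as np
import pymc as pm

# Hypothetical setup; the notebook defines its own `rng` outside this diff.
rng = np.random.default_rng(42)

# Passing a Generator to `random_seed` makes pm.draw reproducible:
# two Generators seeded identically yield identical draws.
draws_a = pm.draw(pm.Normal.dist(mu=0, sigma=1), draws=3, random_seed=np.random.default_rng(42))
draws_b = pm.draw(pm.Normal.dist(mu=0, sigma=1), draws=3, random_seed=np.random.default_rng(42))
assert np.allclose(draws_a, draws_b)
```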
@@ -166,12 +169,14 @@ def generate_gp_samples(x, cov_mu, cov_deltas, noise_dist, rng):
     """
     n = len(x)
     # One draw from the mean GP
-    f_mu_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_mu(x[:, None])))
+    f_mu_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_mu(x[:, None])), random_seed=rng)
 
     # Draws from the delta GPs
     f_deltas = []
     for cov_delta in cov_deltas:
-        f_deltas.append(pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_delta(x[:, None]))))
+        f_deltas.append(
+            pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_delta(x[:, None])), random_seed=rng)
+        )
     f_delta = np.vstack(f_deltas)
 
     # The hierarchical GP
@@ -435,10 +440,9 @@ with pm.Model(coords=coords) as model:
     ell_mu_short = pm.Deterministic("ell_mu_short", pt.softplus(log_ell_mu_short))
 
     eta_mu_trend = pm.Gamma("eta_mu_trend", mu=3.5, sigma=1)
-    ell_mu_trend_params = pm.find_constrained_prior(
-        pm.InverseGamma, lower=5, upper=12, mass=0.95, init_guess={"mu": 9.0, "sigma": 3.0}
+    ell_mu_trend = pz.maxent(pz.InverseGamma(), lower=5, upper=12, mass=0.95, plot=False).to_pymc(
+        "ell_mu_trend"
     )
-    ell_mu_trend = pm.InverseGamma("ell_mu_trend", **ell_mu_trend_params)
 
     ## Prior for the offsets
     log_ell_delta_offset = pm.ZeroSumNormal("log_ell_delta_offset", dims="gp_ix")
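
For readers migrating their own models: `pm.find_constrained_prior` returned a dict of distribution parameters that then had to be passed to the distribution by hand, whereas PreliZ's `pz.maxent` optimizes the distribution object directly and `.to_pymc()` registers it in the active model context. A minimal side-by-side sketch, with both patterns taken from the hunk above (variable names changed for illustration):

```python
import preliz as pz
import pymc as pm

with pm.Model():
    # Old pattern: optimize parameters, then build the PyMC distribution manually
    params = pm.find_constrained_prior(
        pm.InverseGamma, lower=5, upper=12, mass=0.95, init_guess={"mu": 9.0, "sigma": 3.0}
    )
    ell_old = pm.InverseGamma("ell_old", **params)

with pm.Model():
    # New pattern: PreliZ optimizes the distribution and hands it to PyMC in one chain,
    # with no initial guess required
    ell_new = pz.maxent(pz.InverseGamma(), lower=5, upper=12, mass=0.95, plot=False).to_pymc(
        "ell_new"
    )
```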
@@ -473,7 +477,7 @@ Now, what do these priors mean? Good question. As always, it's crucial to do **p
 
 ```{code-cell} ipython3
 with model:
-    idata = pm.sample_prior_predictive()
+    idata = pm.sample_prior_predictive(random_seed=rng)
 ```
 
 ```{code-cell} ipython3
@@ -564,7 +568,7 @@ Once we're satisfied with our priors, which is the case here, we can... sample t
 
 ```{code-cell} ipython3
 with model:
-    idata.extend(pm.sample(nuts_sampler="numpyro", target_accept=0.9))
+    idata.extend(pm.sample(nuts_sampler="numpyro", target_accept=0.9, random_seed=rng))
 ```
 
 ```{code-cell} ipython3
@@ -669,6 +673,7 @@ with model:
         var_names=["f_mu", "f"],
         predictions=True,
         compile_kwargs={"mode": "NUMBA"},
+        random_seed=rng,
     ),
 )
 ```
@@ -863,7 +868,11 @@ cov_t = pm.gp.cov.Matern52(input_dim=1, ls=ell_t_true)
 Kt = cov_t(t[:, None])
 
 K = pt.slinalg.kron(Kx, Kt)
-f_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n_gps * n_t), cov=K)).reshape(n_gps, n_t).T
+f_true = (
+    pm.draw(pm.MvNormal.dist(mu=np.zeros(n_gps * n_t), cov=K), random_seed=rng)
+    .reshape(n_gps, n_t)
+    .T
+)
 
 # Additive gaussian noise
 sigma_noise = 0.5
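
The trailing `.reshape(n_gps, n_t).T` recovers one time series per GP from the single Kronecker-structured draw: with `K = kron(Kx, Kt)`, consecutive blocks of length `n_t` in the flat vector belong to successive GPs. A NumPy-only sketch of that bookkeeping, using toy identity covariances and hypothetical sizes:

```python
import numpy as np

# Hypothetical sizes; the notebook's n_gps and n_t are defined outside this diff
n_gps, n_t = 3, 4
Kx = np.eye(n_gps)   # toy stand-in for the spatial covariance
Kt = np.eye(n_t)     # toy stand-in for the temporal covariance
K = np.kron(Kx, Kt)  # shape (n_gps * n_t, n_gps * n_t)

rng = np.random.default_rng(0)
flat = rng.multivariate_normal(np.zeros(n_gps * n_t), K)

# Row i of reshape(n_gps, n_t) is GP i's time series; transposing gives
# shape (n_t, n_gps), one column per GP, matching f_true above.
f = flat.reshape(n_gps, n_t).T
print(f.shape)  # (4, 3)
```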
@@ -947,17 +956,11 @@ with pm.Model() as model:
     Xs_x = Xx - xx_center
 
     ## covariance on time GP
-    ell_t_params = pm.find_constrained_prior(
-        pm.Lognormal, lower=0.5, upper=4.0, mass=0.95, init_guess={"mu": 1.0, "sigma": 1.0}
-    )
-    ell_t = pm.Lognormal("ell_t", **ell_t_params)
+    ell_t = pz.maxent(pz.LogNormal(), lower=0.5, upper=4.0, mass=0.95, plot=False).to_pymc("ell_t")
     cov_t = pm.gp.cov.Matern52(1, ls=ell_t)
 
     ## covariance on space GP
-    ell_x_params = pm.find_constrained_prior(
-        pm.Lognormal, lower=0.5, upper=4.0, mass=0.95, init_guess={"mu": 1.0, "sigma": 1.0}
-    )
-    ell_x = pm.Lognormal("ell_x", **ell_x_params)
+    ell_x = pz.maxent(pz.LogNormal(), lower=0.5, upper=4.0, mass=0.95, plot=False).to_pymc("ell_x")
     cov_x = pm.gp.cov.Matern52(1, ls=ell_x)
 
     ## Kronecker GP
@@ -981,7 +984,7 @@ pm.model_to_graphviz(model)
 
 ```{code-cell} ipython3
 with model:
-    idata = pm.sample_prior_predictive()
+    idata = pm.sample_prior_predictive(random_seed=rng)
 ```
 
 ```{code-cell} ipython3
@@ -1015,7 +1018,7 @@ axs[1].set(ylim=ylims, title=r"Prior GPs, $\pm 1 \sigma$ posterior intervals");
 
 ```{code-cell} ipython3
 with model:
-    idata.extend(pm.sample(nuts_sampler="numpyro"))
+    idata.extend(pm.sample(nuts_sampler="numpyro", random_seed=rng))
 ```
 
 ```{code-cell} ipython3
@@ -1075,6 +1078,7 @@ And isn't this beautiful?? Now go on, and HSGP-on!
 ## Authors
 
 * Created by [Bill Engels](https://github.com/bwengals), [Alexandre Andorra](https://github.com/AlexAndorra) and [Maxim Kochurov](https://github.com/ferrine) in 2024 ([pymc-examples#668](https://github.com/pymc-devs/pymc-examples/pull/668))
+* Use `pz.maxent` instead of `pm.find_constrained_prior`, and add random seed. [Osvaldo Martin](https://aloctavodia.github.io/). August 2024
 
 +++
 