
Commit 7c214ef

Merge remote-tracking branch 'upstream/main'
2 parents: fae292f + 2d9fec2 · commit 7c214ef

35 files changed: +13634 additions, −6010 deletions

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
@@ -3,11 +3,11 @@ ci:

 repos:
 - repo: https://github.com/psf/black
-  rev: 23.7.0
+  rev: 24.4.2
   hooks:
     - id: black-jupyter
 - repo: https://github.com/nbQA-dev/nbQA
-  rev: 1.7.0
+  rev: 1.8.5
   hooks:
     - id: nbqa-isort
       additional_dependencies: [isort==5.6.4]
@@ -99,7 +99,7 @@ repos:
       language: pygrep
       types_or: [markdown, rst, jupyter]
 - repo: https://github.com/mwouts/jupytext
-  rev: v1.15.1
+  rev: v1.16.3
   hooks:
     - id: jupytext
       files: ^examples/.+\.ipynb$

examples/data/GMB_elv_msk.tif

24.9 KB
Binary file not shown.

examples/data/gambia_dataset.csv

Lines changed: 2036 additions & 0 deletions
Large diffs are not rendered by default.

examples/diagnostics_and_criticism/model_averaging.ipynb

Lines changed: 1508 additions & 574 deletions
Large diffs are not rendered by default.

examples/diagnostics_and_criticism/model_averaging.myst.md

Lines changed: 84 additions & 218 deletions
Large diffs are not rendered by default.

examples/fundamentals/data_container.ipynb

Lines changed: 566 additions & 739 deletions
Large diffs are not rendered by default.

examples/fundamentals/data_container.myst.md

Lines changed: 37 additions & 51 deletions
Large diffs are not rendered by default.

examples/gaussian_processes/HSGP-Advanced.ipynb

Lines changed: 701 additions & 627 deletions
Large diffs are not rendered by default.

examples/gaussian_processes/HSGP-Advanced.myst.md

Lines changed: 25 additions & 21 deletions
@@ -5,9 +5,9 @@ jupytext:
     format_name: myst
     format_version: 0.13
 kernelspec:
-  display_name: pymc-examples
+  display_name: pymc
   language: python
-  name: pymc-examples
+  name: python3
 ---

 (hsgp-advanced)=
@@ -44,6 +44,7 @@ A secondary goal of this implementation is flexibility via an accessible impleme
 import arviz as az
 import matplotlib.pyplot as plt
 import numpy as np
+import preliz as pz
 import pymc as pm
 import pytensor.tensor as pt
 ```
@@ -140,7 +141,9 @@ cov_mu = cov_trend + cov_short
 # Define the delta GPs
 n_gps = 10
 eta_delta_true = 3
-ell_delta_true = pm.draw(pm.Lognormal.dist(mu=np.log(ell_mu_short_true), sigma=0.5), draws=n_gps)
+ell_delta_true = pm.draw(
+    pm.Lognormal.dist(mu=np.log(ell_mu_short_true), sigma=0.5), draws=n_gps, random_seed=rng
+)

 cov_deltas = [
     eta_delta_true**2 * pm.gp.cov.Matern52(input_dim=1, ls=ell_i) for ell_i in ell_delta_true
@@ -166,12 +169,14 @@ def generate_gp_samples(x, cov_mu, cov_deltas, noise_dist, rng):
     """
     n = len(x)
     # One draw from the mean GP
-    f_mu_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_mu(x[:, None])))
+    f_mu_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_mu(x[:, None])), random_seed=rng)

     # Draws from the delta GPs
     f_deltas = []
     for cov_delta in cov_deltas:
-        f_deltas.append(pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_delta(x[:, None]))))
+        f_deltas.append(
+            pm.draw(pm.MvNormal.dist(mu=np.zeros(n), cov=cov_delta(x[:, None])), random_seed=rng)
+        )
     f_delta = np.vstack(f_deltas)

     # The hierarchical GP
@@ -435,10 +440,9 @@ with pm.Model(coords=coords) as model:
     ell_mu_short = pm.Deterministic("ell_mu_short", pt.softplus(log_ell_mu_short))

     eta_mu_trend = pm.Gamma("eta_mu_trend", mu=3.5, sigma=1)
-    ell_mu_trend_params = pm.find_constrained_prior(
-        pm.InverseGamma, lower=5, upper=12, mass=0.95, init_guess={"mu": 9.0, "sigma": 3.0}
+    ell_mu_trend = pz.maxent(pz.InverseGamma(), lower=5, upper=12, mass=0.95, plot=False).to_pymc(
+        "ell_mu_trend"
     )
-    ell_mu_trend = pm.InverseGamma("ell_mu_trend", **ell_mu_trend_params)

     ## Prior for the offsets
     log_ell_delta_offset = pm.ZeroSumNormal("log_ell_delta_offset", dims="gp_ix")
@@ -473,7 +477,7 @@ Now, what do these priors mean? Good question. As always, it's crucial to do **p

 ```{code-cell} ipython3
 with model:
-    idata = pm.sample_prior_predictive()
+    idata = pm.sample_prior_predictive(random_seed=rng)
 ```

 ```{code-cell} ipython3
@@ -564,7 +568,7 @@ Once we're satisfied with our priors, which is the case here, we can... sample t

 ```{code-cell} ipython3
 with model:
-    idata.extend(pm.sample(nuts_sampler="numpyro", target_accept=0.9))
+    idata.extend(pm.sample(nuts_sampler="numpyro", target_accept=0.9, random_seed=rng))
 ```

 ```{code-cell} ipython3
@@ -669,6 +673,7 @@ with model:
             var_names=["f_mu", "f"],
             predictions=True,
             compile_kwargs={"mode": "NUMBA"},
+            random_seed=rng,
         ),
     )
 ```
@@ -863,7 +868,11 @@ cov_t = pm.gp.cov.Matern52(input_dim=1, ls=ell_t_true)
 Kt = cov_t(t[:, None])

 K = pt.slinalg.kron(Kx, Kt)
-f_true = pm.draw(pm.MvNormal.dist(mu=np.zeros(n_gps * n_t), cov=K)).reshape(n_gps, n_t).T
+f_true = (
+    pm.draw(pm.MvNormal.dist(mu=np.zeros(n_gps * n_t), cov=K), random_seed=rng)
+    .reshape(n_gps, n_t)
+    .T
+)

 # Additive gaussian noise
 sigma_noise = 0.5
@@ -947,17 +956,11 @@ with pm.Model() as model:
     Xs_x = Xx - xx_center

     ## covariance on time GP
-    ell_t_params = pm.find_constrained_prior(
-        pm.Lognormal, lower=0.5, upper=4.0, mass=0.95, init_guess={"mu": 1.0, "sigma": 1.0}
-    )
-    ell_t = pm.Lognormal("ell_t", **ell_t_params)
+    ell_t = pz.maxent(pz.LogNormal(), lower=0.5, upper=4.0, mass=0.95, plot=False).to_pymc("ell_t")
     cov_t = pm.gp.cov.Matern52(1, ls=ell_t)

     ## covariance on space GP
-    ell_x_params = pm.find_constrained_prior(
-        pm.Lognormal, lower=0.5, upper=4.0, mass=0.95, init_guess={"mu": 1.0, "sigma": 1.0}
-    )
-    ell_x = pm.Lognormal("ell_x", **ell_x_params)
+    ell_x = pz.maxent(pz.LogNormal(), lower=0.5, upper=4.0, mass=0.95, plot=False).to_pymc("ell_x")
     cov_x = pm.gp.cov.Matern52(1, ls=ell_x)

     ## Kronecker GP
@@ -981,7 +984,7 @@ pm.model_to_graphviz(model)

 ```{code-cell} ipython3
 with model:
-    idata = pm.sample_prior_predictive()
+    idata = pm.sample_prior_predictive(random_seed=rng)
 ```

 ```{code-cell} ipython3
@@ -1015,7 +1018,7 @@ axs[1].set(ylim=ylims, title=r"Prior GPs, $\pm 1 \sigma$ posterior intervals");

 ```{code-cell} ipython3
 with model:
-    idata.extend(pm.sample(nuts_sampler="numpyro"))
+    idata.extend(pm.sample(nuts_sampler="numpyro", random_seed=rng))
 ```

 ```{code-cell} ipython3
@@ -1075,6 +1078,7 @@ And isn't this beautiful?? Now go on, and HSGP-on!
 ## Authors

 * Created by [Bill Engels](https://github.com/bwengals), [Alexandre Andorra](https://github.com/AlexAndorra) and [Maxim Kochurov](https://github.com/ferrine) in 2024 ([pymc-examples#668](https://github.com/pymc-devs/pymc-examples/pull/668))
+* Use `pz.maxent` instead of `pm.find_constrained_prior`, and add random seed. [Osvaldo Martin](https://aloctavodia.github.io/). August 2024

 +++
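The two changes that recur throughout this notebook's diff are easier to see in isolation: prior elicitation moves from `pm.find_constrained_prior` to PreliZ's `pz.maxent(...).to_pymc(...)`, and every forward-sampling call gains an explicit `random_seed=rng`. The sketch below is illustrative only, not part of the merged notebook: the toy model and the seed value 42 are assumptions, and the `pz.maxent` call simply mirrors the usage shown in the hunks above.

```python
import numpy as np
import preliz as pz
import pymc as pm

rng = np.random.default_rng(42)  # hypothetical seed; the notebook defines its own rng

with pm.Model() as toy_model:
    # Old pattern, removed in this merge:
    #   params = pm.find_constrained_prior(
    #       pm.InverseGamma, lower=5, upper=12, mass=0.95, init_guess={"mu": 9.0, "sigma": 3.0}
    #   )
    #   ell_mu_trend = pm.InverseGamma("ell_mu_trend", **params)
    #
    # New pattern: find the maximum-entropy InverseGamma with 95% of its mass
    # in [5, 12], then register it on the model as a PyMC random variable.
    ell_mu_trend = pz.maxent(pz.InverseGamma(), lower=5, upper=12, mass=0.95, plot=False).to_pymc(
        "ell_mu_trend"
    )

    # Forward sampling is now seeded explicitly, matching the pm.draw,
    # pm.sample, and pm.sample_prior_predictive calls in the diff.
    prior = pm.sample_prior_predictive(random_seed=rng)
```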

examples/gaussian_processes/HSGP-Basic.ipynb

Lines changed: 340 additions & 325 deletions
Large diffs are not rendered by default.
