"""The general, n-dimensional Gaussian noise model."""

from functools import partial

import numpy as np
import scipy.stats as ss

import elfi
from elfi.examples.gnk import euclidean_multidim


def Gauss_nd(*params, cov_ii=None, cov_ij=None, n_obs=15, batch_size=1, random_state=None):
    """Sample from an n-dimensional Gaussian distribution.

    References
    ----------
    The default settings replicate the experiment settings used in [1].

    [1] Järvenpää et al. (2017), arXiv:1704.00520.

    Parameters
    ----------
    *params : array_like
        The array elements correspond to the mean parameters.
    cov_ii : float, optional
        The diagonal (variance) element of the covariance matrix.
    cov_ij : float, optional
        The off-diagonal (covariance) element of the covariance matrix.
    n_obs : int, optional
        The number of observations per batch member.
    batch_size : int, optional
        The number of simulations in the batch.
    random_state : np.random.RandomState, optional

    Returns
    -------
    y : array_like
        Simulated observations with shape (batch_size, n_obs, n_dim).

    """
    n_dim = len(params)
    # Formatting the mean.
    mu = np.zeros(shape=(batch_size, n_dim))
    for idx_dim, param_mu in enumerate(params):
        mu[:, idx_dim] = param_mu
    # Formatting the diagonal covariance.
    cov_ii = np.repeat(cov_ii, batch_size)
    if batch_size == 1:
        cov_ii = cov_ii[None]
    # Formatting the non-diagonal covariance.
    if n_dim != 1:
        cov_ij = np.repeat(cov_ij, batch_size)
        if batch_size == 1:
            cov_ij = cov_ij[None]
    # Creating the covariance matrix.
    cov = np.zeros(shape=(batch_size, n_dim, n_dim))
    for idx_batch in range(batch_size):
        if n_dim != 1:
            cov[idx_batch].fill(cov_ij[idx_batch].item())
        np.fill_diagonal(cov[idx_batch], cov_ii[idx_batch])
    # Sampling observations.
    y = np.zeros(shape=(batch_size, n_obs, n_dim))
    for idx_batch in range(batch_size):
        y_batch = ss.multivariate_normal.rvs(mean=mu[idx_batch],
                                             cov=cov[idx_batch],
                                             size=n_obs,
                                             random_state=random_state)
        if n_dim == 1:
            y_batch = y_batch[:, None]
        y[idx_batch, :, :] = y_batch
    return y
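
# Usage sketch (values are illustrative, matching the defaults in get_model):
#
#     y = Gauss_nd(4, 4, cov_ii=1, cov_ij=.5, n_obs=15, batch_size=2,
#                  random_state=np.random.RandomState(0))
#     y.shape  # (2, 15, 2), i.e. (batch_size, n_obs, n_dim)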


def ss_mean(x):
    """Return the summary statistic corresponding to the mean."""
    return np.mean(x, axis=1)


def ss_var(x):
    """Return the summary statistic corresponding to the variance."""
    return np.var(x, axis=1)
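
# Both summaries reduce over the observation axis, mapping simulator output of
# shape (batch_size, n_obs, n_dim) to summaries of shape (batch_size, n_dim).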


def get_model(true_params=None, cov_ii=1, cov_ij=.5, n_obs=15, seed_obs=None):
    """Return an initialised Gaussian noise model.

    Parameters
    ----------
    true_params : array_like, optional
        The array elements correspond to the mean parameters.
    cov_ii : float, optional
        The diagonal (variance) element of the covariance matrix.
    cov_ij : float, optional
        The off-diagonal (covariance) element of the covariance matrix.
    n_obs : int, optional
        The number of observations.
    seed_obs : int, optional
        The seed used to generate the observed data.

    Returns
    -------
    m : elfi.ElfiModel

    """
    # The default settings use the 2-D Gaussian model.
    if true_params is None:
        true_params = [4, 4]
    n_dim = len(true_params)
    # Obtaining the observations.
    y_obs = Gauss_nd(*true_params,
                     cov_ii=cov_ii,
                     cov_ij=cov_ij,
                     n_obs=n_obs,
                     random_state=np.random.RandomState(seed_obs))
    sim_fn = partial(Gauss_nd, cov_ii=cov_ii, cov_ij=cov_ij, n_obs=n_obs)
    m = elfi.ElfiModel()
    # Defining the priors.
    priors = []
    for i in range(n_dim):
        name_prior = 'mu_{}'.format(i)
        prior_mu = elfi.Prior('uniform', 0, 8, model=m, name=name_prior)
        priors.append(prior_mu)

    elfi.Simulator(sim_fn, *priors, observed=y_obs, name='gm')
    elfi.Summary(ss_mean, m['gm'], name='ss_mean')
    elfi.Summary(ss_var, m['gm'], name='ss_var')
    elfi.Discrepancy(euclidean_multidim, m['ss_mean'], m['ss_var'], name='d')

    return m
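

# Minimal end-to-end sketch: build the model and infer the means with ELFI's
# rejection sampler. The sampler settings below are illustrative choices, not
# the settings used in the reference experiment.
if __name__ == '__main__':
    model = get_model(seed_obs=1)
    rej = elfi.Rejection(model['d'], batch_size=1000, seed=1)
    result = rej.sample(1000, quantile=0.01)
    print(result)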