|
16 | 16 |
|
17 | 17 | class Base(object):
|
18 | 18 | """
|
19 |
| - Base class. Can be used as a GP placeholder object in |
20 |
| - additive models for GPs that won't be used for prediction. |
| 19 | + Base class. Can be used as a GP placeholder object in additive |
| 20 | + models for GP objects that won't be used for prediction. |
21 | 21 | """
|
22 | 22 | def __init__(self, mean_func=None, cov_func=None):
|
23 | 23 | # check if not None, args are correct subclasses.
|
@@ -69,7 +69,8 @@ class Latent(Base):
|
69 | 69 | variables representing the unknown, or latent, function whose
|
70 | 70 | distribution is the GP prior or GP conditional. This GP implementation
|
71 | 71 | can be used to implement regression on data that is not normally
|
72 |
| - distributed. |
| 72 | + distributed. For more information on the `prior` and `conditional` methods, |
| 73 | + see their docstrings. |
73 | 74 |
|
74 | 75 | Parameters
|
75 | 76 | ----------
|
@@ -103,14 +104,6 @@ class Latent(Base):
|
103 | 104 |
|
104 | 105 | with model:
|
105 | 106 | fcond = gp.conditional("fcond", Xnew=Xnew)
|
106 |
| -
|
107 |
| - Notes |
108 |
| - ----- |
109 |
| - - After initializing the GP object with a mean and covariance |
110 |
| - function, it can be added to other `Latent` GP objects. |
111 |
| -
|
112 |
| - - For more information on the `prior` and `conditional` methods, |
113 |
| - see their docstrings. |
114 | 107 | """
|
115 | 108 |
|
116 | 109 | def __init__(self, mean_func=None, cov_func=None):
|
@@ -215,8 +208,27 @@ def conditional(self, name, Xnew, n_points=None, given=None):
|
215 | 208 |
|
216 | 209 | @conditioned_vars(["X", "f", "nu"])
|
217 | 210 | class TP(Latent):
|
218 |
| - """ StudentT process |
219 |
| - https://www.cs.cmu.edu/~andrewgw/tprocess.pdf |
| 211 | + R""" |
| 212 | + Implementation of a Student's T process prior. The usage is nearly |
| 213 | + identical to that of `gp.Latent`. The differences are that it must |
| 214 | + be initialized with a degrees of freedom parameter, and TP is not |
| 215 | + additive. Given a mean and covariance function, and a degrees of |
| 216 | + freedom parameter, the function $f(x)$ is modeled as, |
| 217 | +
|
| 218 | + .. math:: |
| 219 | +
|
| 220 | + f(x) \sim \mathcal{TP}\left(\mu(x), k(x, x'),\, \nu \right) |
| 221 | +
|
| 222 | + Parameters |
| 223 | + ---------- |
| 224 | + cov_func : None, 2D array, or instance of Covariance |
| 225 | + The covariance function. Defaults to zero. |
| 226 | + mean_func : None, instance of Mean |
| 227 | + The mean function. Defaults to zero. |
| 228 | + nu : float |
| 229 | + The degrees of freedom. |
| 230 | +
|
| 231 | + For more information, see https://www.cs.cmu.edu/~andrewgw/tprocess.pdf |
220 | 232 | """
|
221 | 233 | def __init__(self, mean_func=None, cov_func=None, nu=None):
|
222 | 234 | if nu is None:
|
@@ -317,7 +329,8 @@ class Marginal(Base):
|
317 | 329 | The `gp.Marginal` class is an implementation of the sum of a GP
|
318 | 330 | prior and additive noise. It has `marginal_likelihood`, `conditional`
|
319 | 331 | and `predict` methods. This GP implementation can be used to
|
320 |
| - implement regression on data that is normally distributed. |
| 332 | + implement regression on data that is normally distributed. For more |
| 333 | + information on the `prior` and `conditional` methods, see their docstrings. |
321 | 334 |
|
322 | 335 | Parameters
|
323 | 336 | ----------
|
@@ -352,14 +365,6 @@ class Marginal(Base):
|
352 | 365 |
|
353 | 366 | with model:
|
354 | 367 | fcond = gp.conditional("fcond", Xnew=Xnew)
|
355 |
| -
|
356 |
| - Notes |
357 |
| - ----- |
358 |
| - - After initializing the GP object with a mean and covariance |
359 |
| - function, it can be added to other `Latent` GP objects. |
360 |
| -
|
361 |
| - - For more information on the `prior` and `conditional` methods, |
362 |
| - see their docstrings. |
363 | 368 | """
|
364 | 369 |
|
365 | 370 | def __init__(self, mean_func=None, cov_func=None):
|
@@ -542,9 +547,67 @@ def predictt(self, Xnew, diag=False, pred_noise=False, **given):
|
542 | 547 |
|
543 | 548 | @conditioned_vars(["X", "Xu", "y", "sigma"])
|
544 | 549 | class MarginalSparse(Marginal):
|
545 |
| - _available_approx = ("FITC", "VFE", "DTC") |
546 |
| - """ FITC and VFE sparse approximations |
| 550 | + R""" |
| 551 | + The `gp.MarginalSparse` class is an implementation of the sum of a GP |
| 552 | + prior and additive noise. It has `marginal_likelihood`, `conditional` |
| 553 | + and `predict` methods. This GP implementation can be used to |
| 554 | + implement regression on data that is normally distributed. The |
| 555 | + available approximations are: |
| 556 | +
|
| 557 | + - DTC: Deterministic Training Conditional |
| 558 | + - FITC: Fully Independent Training Conditional |
| 559 | + - VFE: Variational Free Energy |
| 560 | +
|
| 561 | + For more information on these approximations, see e.g. |
| 562 | +
|
| 563 | + - "A unifying view of sparse approximate Gaussian process regression", |
| 564 | + 2005, Quinonero-Candela, Rasmussen |
| 565 | + - "Variational Learning of Inducing Variables in Sparse Gaussian Processes", |
| 566 | + 2009, Titsias |
| 567 | +
|
| 568 | + Parameters |
| 569 | + ---------- |
| 570 | + cov_func : None, 2D array, or instance of Covariance |
| 571 | + The covariance function. Defaults to zero. |
| 572 | + mean_func : None, instance of Mean |
| 573 | + The mean function. Defaults to zero. |
| 574 | + approx : string |
| 575 | + The approximation to use. Must be one of `VFE`, `FITC` or `DTC`. |
| 576 | +
|
| 577 | + Examples |
| 578 | + -------- |
| 579 | + .. code:: python |
| 580 | +
|
| 581 | + # A one dimensional column vector of inputs. |
| 582 | + X = np.linspace(0, 1, 10)[:, None] |
| 583 | +
|
| 584 | + # A smaller set of inducing inputs |
| 585 | + Xu = np.linspace(0, 1, 5)[:, None] |
| 586 | +
|
| 587 | + with pm.Model() as model: |
| 588 | + # Specify the covariance function. |
| 589 | + cov_func = pm.gp.cov.ExpQuad(1, lengthscales=0.1) |
| 590 | +
|
| 591 | + # Specify the GP. The default mean function is `Zero`. |
| 592 | + gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC") |
| 593 | +
|
| 594 | + # Place a GP prior over the function f. |
| 595 | + sigma = pm.HalfCauchy("sigma", beta=3) |
| 596 | + y_ = gp.marginal_likelihood("y", X=X, Xu=Xu, y=y, sigma=sigma) |
| 597 | +
|
| 598 | + ... |
| 599 | +
|
| 600 | + # After fitting or sampling, specify the distribution |
| 601 | + # at new points with .conditional |
| 602 | + Xnew = np.linspace(-1, 2, 50)[:, None] |
| 603 | +
|
| 604 | + with model: |
| 605 | + fcond = gp.conditional("fcond", Xnew=Xnew) |
547 | 606 | """
|
| 607 | + |
| 608 | + |
| 609 | + _available_approx = ("FITC", "VFE", "DTC") |
| 610 | + |
548 | 611 | def __init__(self, mean_func=None, cov_func=None, approx="FITC"):
|
549 | 612 | if approx not in self._available_approx:
|
550 | 613 | raise NotImplementedError(approx)
|
|
0 commit comments