Skip to content

Commit abcfb67

Browse files
committed
done with docstrings
1 parent 1a9e68f commit abcfb67

File tree

1 file changed

+87
-24
lines changed

1 file changed

+87
-24
lines changed

pymc3/gp/gp.py

Lines changed: 87 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616

1717
class Base(object):
1818
"""
19-
Base class. Can be used as a GP placeholder object in
20-
additive models for GPs that won't be used for prediction.
19+
Base class. Can be used as a GP placeholder object in additive
20+
models for GP objects that won't be used for prediction.
2121
"""
2222
def __init__(self, mean_func=None, cov_func=None):
2323
# check if not None, args are correct subclasses.
@@ -69,7 +69,8 @@ class Latent(Base):
6969
variables representing the unknown, or latent, function whose
7070
distribution is the GP prior or GP conditional. This GP implementation
7171
can be used to implement regression on data that is not normally
72-
distributed.
72+
distributed. For more information on the `prior` and `conditional` methods,
73+
see their docstrings.
7374
7475
Parameters
7576
----------
@@ -103,14 +104,6 @@ class Latent(Base):
103104
104105
with model:
105106
fcond = gp.conditional("fcond", Xnew=Xnew)
106-
107-
Notes
108-
-----
109-
- After initializing the GP object with a mean and covariance
110-
function, it can be added to other `Latent` GP objects.
111-
112-
- For more information on the `prior` and `conditional` methods,
113-
see their docstrings.
114107
"""
115108

116109
def __init__(self, mean_func=None, cov_func=None):
@@ -215,8 +208,27 @@ def conditional(self, name, Xnew, n_points=None, given=None):
215208

216209
@conditioned_vars(["X", "f", "nu"])
217210
class TP(Latent):
218-
""" StudentT process
219-
https://www.cs.cmu.edu/~andrewgw/tprocess.pdf
211+
R"""
212+
Implementation of a Student's T process prior. The usage is nearly
213+
identical to that of `gp.Latent`. The differences are that it must
214+
be initialized with a degrees of freedom parameter, and TP is not
215+
additive. Given a mean and covariance function, and a degrees of
216+
freedom parameter, the function $f(x)$ is modeled as,
217+
218+
.. math::
219+
220+
f(x) \sim \mathcal{TP}\left(\mu(x), k(x, x'),\, \nu \right)
221+
222+
Parameters
223+
----------
224+
cov_func : None, 2D array, or instance of Covariance
225+
The covariance function. Defaults to zero.
226+
mean_func : None, instance of Mean
227+
The mean function. Defaults to zero.
228+
nu : float
229+
The degrees of freedom.
230+
231+
For more information, see https://www.cs.cmu.edu/~andrewgw/tprocess.pdf
220232
"""
221233
def __init__(self, mean_func=None, cov_func=None, nu=None):
222234
if nu is None:
@@ -317,7 +329,8 @@ class Marginal(Base):
317329
The `gp.Marginal` class is an implementation of the sum of a GP
318330
prior and additive noise. It has `marginal_likelihood`, `conditional`
319331
and `predict` methods. This GP implementation can be used to
320-
implement regression on data that is normally distributed.
332+
implement regression on data that is normally distributed. For more
333+
information on the `prior` and `conditional` methods, see their docstrings.
321334
322335
Parameters
323336
----------
@@ -352,14 +365,6 @@ class Marginal(Base):
352365
353366
with model:
354367
fcond = gp.conditional("fcond", Xnew=Xnew)
355-
356-
Notes
357-
-----
358-
- After initializing the GP object with a mean and covariance
359-
function, it can be added to other `Latent` GP objects.
360-
361-
- For more information on the `prior` and `conditional` methods,
362-
see their docstrings.
363368
"""
364369

365370
def __init__(self, mean_func=None, cov_func=None):
@@ -542,9 +547,67 @@ def predictt(self, Xnew, diag=False, pred_noise=False, **given):
542547

543548
@conditioned_vars(["X", "Xu", "y", "sigma"])
544549
class MarginalSparse(Marginal):
545-
_available_approx = ("FITC", "VFE", "DTC")
546-
""" FITC and VFE sparse approximations
550+
R"""
551+
The `gp.MarginalSparse` class is an implementation of the sum of a GP
552+
prior and additive noise. It has `marginal_likelihood`, `conditional`
553+
and `predict` methods. This GP implementation can be used to
554+
implement regression on data that is normally distributed. The
555+
available approximations are:
556+
557+
- DTC: Deterministic Training Conditional
558+
- FITC: Fully Independent Training Conditional
559+
- VFE: Variational Free Energy
560+
561+
For more information on these approximations, see e.g.
562+
563+
- "A unifying view of sparse approximate Gaussian process regression",
564+
2005, Quinonero-Candela, Rasmussen
565+
- "Variational Learning of Inducing Variables in Sparse Gaussian Processes",
566+
2009, Titsias
567+
568+
Parameters
569+
----------
570+
cov_func : None, 2D array, or instance of Covariance
571+
The covariance function. Defaults to zero.
572+
mean_func : None, instance of Mean
573+
The mean function. Defaults to zero.
574+
approx : string
575+
The approximation to use. Must be one of `VFE`, `FITC` or `DTC`.
576+
577+
Examples
578+
--------
579+
.. code:: python
580+
581+
# A one dimensional column vector of inputs.
582+
X = np.linspace(0, 1, 10)[:, None]
583+
584+
# A smaller set of inducing inputs
585+
Xu = np.linspace(0, 1, 5)[:, None]
586+
587+
with pm.Model() as model:
588+
# Specify the covariance function.
589+
cov_func = pm.gp.cov.ExpQuad(1, lengthscales=0.1)
590+
591+
# Specify the GP. The default mean function is `Zero`.
592+
gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
593+
594+
# Place a GP prior over the function f.
595+
sigma = pm.HalfCauchy("sigma", beta=3)
596+
y_ = gp.marginal_likelihood("y", X=X, Xu=Xu, y=y, sigma=sigma)
597+
598+
...
599+
600+
# After fitting or sampling, specify the distribution
601+
# at new points with .conditional
602+
Xnew = np.linspace(-1, 2, 50)[:, None]
603+
604+
with model:
605+
fcond = gp.conditional("fcond", Xnew=Xnew)
547606
"""
607+
608+
609+
_available_approx = ("FITC", "VFE", "DTC")
610+
548611
def __init__(self, mean_func=None, cov_func=None, approx="FITC"):
549612
if approx not in self._available_approx:
550613
raise NotImplementedError(approx)

0 commit comments

Comments
 (0)