Skip to content

Commit 5bedaa6

Browse files
fonnesbeck and ColCarroll
authored and committed
Docstring fixes in gp (#2535)
* Docstring fixes in gp
* Reformatted docstrings for GP classes to adhere to numpy format
1 parent fc5c238 commit 5bedaa6

File tree

1 file changed

+39
-22
lines changed

1 file changed

+39
-22
lines changed

pymc3/gp/gp.py

Lines changed: 39 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,8 @@ def predict(self, Xnew, point=None, given=None, diag=False):
4444
@conditioned_vars(["X", "f"])
4545
class Latent(Base):
4646
R"""
47+
Latent Gaussian process.
48+
4749
The `gp.Latent` class is a direct implementation of a GP. No additive
4850
noise is assumed. It is called "Latent" because the underlying function
4951
values are treated as latent variables. It has a `prior` method and a
@@ -111,8 +113,10 @@ def _build_prior(self, name, X, reparameterize=True, **kwargs):
111113

112114
def prior(self, name, X, reparameterize=True, **kwargs):
113115
R"""
114-
Returns the GP prior distribution evaluated over the input
115-
locations `X`. This is the prior probability over the space
116+
Returns the GP prior distribution evaluated over the input
117+
locations `X`.
118+
119+
This is the prior probability over the space
116120
of functions described by its mean and covariance function.
117121
118122
.. math::
@@ -163,8 +167,10 @@ def _build_conditional(self, Xnew, X, f, cov_total, mean_total):
163167

164168
def conditional(self, name, Xnew, given={}, **kwargs):
165169
R"""
166-
Returns the conditional distribution evaluated over new input
167-
locations `Xnew`. Given a set of function values `f` that
170+
Returns the conditional distribution evaluated over new input
171+
locations `Xnew`.
172+
173+
Given a set of function values `f` that
168174
the GP prior was over, the conditional distribution over a
169175
set of new points, `f_*` is
170176
@@ -199,10 +205,11 @@ def conditional(self, name, Xnew, given={}, **kwargs):
199205
@conditioned_vars(["X", "f", "nu"])
200206
class TP(Latent):
201207
"""
202-
Implementation of a Student's T process prior. The usage is nearly
203-
identical to that of `gp.Latent`. The differences are that it must
204-
be initialized with a degrees of freedom parameter, and TP is not
205-
additive. Given a mean and covariance function, and a degrees of
208+
Student's T process prior.
209+
210+
The usage is nearly identical to that of `gp.Latent`. The differences
211+
are that it must be initialized with a degrees of freedom parameter, and
212+
TP is not additive. Given a mean and covariance function, and a degrees of
206213
freedom parameter, the function $f(x)$ is modeled as,
207214
208215
.. math::
@@ -245,8 +252,10 @@ def _build_prior(self, name, X, reparameterize=True, **kwargs):
245252

246253
def prior(self, name, X, reparameterize=True, **kwargs):
247254
R"""
248-
Returns the TP prior distribution evaluated over the input
249-
locations `X`. This is the prior probability over the space
255+
Returns the TP prior distribution evaluated over the input
256+
locations `X`.
257+
258+
This is the prior probability over the space
250259
of functions described by its mean and covariance function.
251260
252261
Parameters
@@ -283,8 +292,10 @@ def _build_conditional(self, Xnew, X, f):
283292

284293
def conditional(self, name, Xnew, **kwargs):
285294
R"""
286-
Returns the conditional distribution evaluated over new input
287-
locations `Xnew`. Given a set of function values `f` that
295+
Returns the conditional distribution evaluated over new input
296+
locations `Xnew`.
297+
298+
Given a set of function values `f` that
288299
the TP prior was over, the conditional distribution over a
289300
set of new points, `f_*` is
290301
@@ -310,6 +321,8 @@ def conditional(self, name, Xnew, **kwargs):
310321
@conditioned_vars(["X", "y", "noise"])
311322
class Marginal(Base):
312323
R"""
324+
Marginal Gaussian process.
325+
313326
The `gp.Marginal` class is an implementation of the sum of a GP
314327
prior and additive noise. It has `marginal_likelihood`, `conditional`
315328
and `predict` methods. This GP implementation can be used to
@@ -335,7 +348,7 @@ class Marginal(Base):
335348
cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)
336349
337350
# Specify the GP. The default mean function is `Zero`.
338-
gp = pm.gp.Latent(cov_func=cov_func)
351+
gp = pm.gp.Marginal(cov_func=cov_func)
339352
340353
# Place a GP prior over the function f.
341354
sigma = pm.HalfCauchy("sigma", beta=3)
@@ -363,9 +376,10 @@ def _build_marginal_likelihood(self, X, noise):
363376

364377
def marginal_likelihood(self, name, X, y, noise, is_observed=True, **kwargs):
365378
R"""
366-
Returns the marginal likelihood distribution, given the input
367-
locations `X` and the data `y`. This is integral over the product of the GP
368-
prior and a normal likelihood.
379+
Returns the marginal likelihood distribution, given the input
380+
locations `X` and the data `y`.
381+
382+
This is the integral over the product of the GP prior and a normal likelihood.
369383
370384
.. math::
371385
@@ -445,10 +459,11 @@ def _build_conditional(self, Xnew, pred_noise, diag, X, y, noise,
445459

446460
def conditional(self, name, Xnew, pred_noise=False, given={}, **kwargs):
447461
R"""
448-
Returns the conditional distribution evaluated over new input
449-
locations `Xnew`. Given a set of function values `f` that
450-
the GP prior was over, the conditional distribution over a
451-
set of new points, `f_*` is
462+
Returns the conditional distribution evaluated over new input
463+
locations `Xnew`.
464+
465+
Given a set of function values `f` that the GP prior was over, the
466+
conditional distribution over a set of new points, `f_*` is:
452467
453468
.. math::
454469
@@ -535,6 +550,8 @@ def predictt(self, Xnew, diag=False, pred_noise=False, given={}):
535550
@conditioned_vars(["X", "Xu", "y", "sigma"])
536551
class MarginalSparse(Marginal):
537552
R"""
553+
Approximate marginal Gaussian process.
554+
538555
The `gp.MarginalSparse` class is an implementation of the sum of a GP
539556
prior and additive noise. It has `marginal_likelihood`, `conditional`
540557
and `predict` methods. This GP implementation can be used to
@@ -638,7 +655,7 @@ def _build_marginal_likelihood_logp(self, X, Xu, y, sigma):
638655

639656
def marginal_likelihood(self, name, X, Xu, y, sigma, is_observed=True, **kwargs):
640657
R"""
641-
Returns the approximate marginal likelihood distribution, given the input
658+
Returns the approximate marginal likelihood distribution, given the input
642659
locations `X`, inducing point locations `Xu`, data `y`, and white noise
643660
standard deviations `sigma`.
644661
@@ -724,7 +741,7 @@ def _get_given_vals(self, given):
724741

725742
def conditional(self, name, Xnew, pred_noise=False, given={}, **kwargs):
726743
R"""
727-
Returns the approximate conditional distribution of the GP evaluated over
744+
Returns the approximate conditional distribution of the GP evaluated over
728745
new input locations `Xnew`.
729746
730747
Parameters

0 commit comments

Comments (0)