Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 3 additions & 35 deletions pymc/gp/gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,25 +39,6 @@
__all__ = ["TP", "Latent", "LatentKron", "Marginal", "MarginalApprox", "MarginalKron"]


_noise_deprecation_warning = (
"The 'noise' parameter has been been changed to 'sigma' "
"in order to standardize the GP API and will be "
"deprecated in future releases."
)


def _handle_sigma_noise_parameters(sigma, noise):
"""Help transition of 'noise' parameter to be named 'sigma'."""
if (sigma is None and noise is None) or (sigma is not None and noise is not None):
raise ValueError("'sigma' argument must be specified.")

if sigma is None:
warnings.warn(_noise_deprecation_warning, FutureWarning)
return noise

return sigma


class Base:
"""Base class."""

Expand Down Expand Up @@ -477,8 +458,7 @@ def marginal_likelihood(
name,
X,
y,
sigma=None,
noise=None,
sigma,
jitter=JITTER_DEFAULT,
is_observed=True,
**kwargs,
Expand All @@ -505,8 +485,6 @@ def marginal_likelihood(
sigma : float, Variable, or Covariance, default ~pymc.gp.cov.WhiteNoise
Standard deviation of the Gaussian noise. Can also be a Covariance for
non-white noise.
noise : float, Variable, or Covariance, optional
Deprecated. Previous parameterization of `sigma`.
jitter : float, default 1e-6
A small correction added to the diagonal of positive semi-definite
covariance matrices to ensure numerical stability.
Expand All @@ -516,8 +494,6 @@ def marginal_likelihood(
Extra keyword arguments that are passed to :class:`~pymc.MvNormal` distribution
constructor.
"""
sigma = _handle_sigma_noise_parameters(sigma=sigma, noise=noise)

noise_func = sigma if isinstance(sigma, BaseCovariance) else pm.gp.cov.WhiteNoise(sigma)
mu, cov = self._build_marginal_likelihood(X=X, noise_func=noise_func, jitter=jitter)
self.X = X
Expand All @@ -544,10 +520,6 @@ def _get_given_vals(self, given):
cov_total = self.cov_func
mean_total = self.mean_func

if "noise" in given:
warnings.warn(_noise_deprecation_warning, FutureWarning)
given["sigma"] = given["noise"]

if all(val in given for val in ["X", "y", "sigma"]):
X, y, sigma = given["X"], given["y"], given["sigma"]
noise_func = sigma if isinstance(sigma, BaseCovariance) else pm.gp.cov.WhiteNoise(sigma)
Expand Down Expand Up @@ -804,9 +776,7 @@ def _build_marginal_likelihood_loglik(self, y, X, Xu, sigma, jitter):
quadratic = 0.5 * (pt.dot(r, r_l) - pt.dot(c, c))
return -1.0 * (constant + logdet + quadratic + trace)

def marginal_likelihood(
self, name, X, Xu, y, sigma=None, noise=None, jitter=JITTER_DEFAULT, **kwargs
):
def marginal_likelihood(self, name, X, Xu, y, sigma, jitter=JITTER_DEFAULT, **kwargs):
R"""
Return the approximate marginal likelihood distribution.

Expand All @@ -827,8 +797,6 @@ def marginal_likelihood(
noise. Must have shape `(n, )`.
sigma : float, Variable
Standard deviation of the Gaussian noise.
noise : float, Variable, optional
Previous parameterization of `sigma`.
jitter : float, default 1e-6
A small correction added to the diagonal of positive semi-definite
covariance matrices to ensure numerical stability.
Expand All @@ -840,7 +808,7 @@ def marginal_likelihood(
self.Xu = Xu
self.y = y

self.sigma = _handle_sigma_noise_parameters(sigma=sigma, noise=noise)
self.sigma = sigma

approx_loglik = self._build_marginal_likelihood_loglik(
y=self.y, X=self.X, Xu=self.Xu, sigma=self.sigma, jitter=jitter
Expand Down
78 changes: 0 additions & 78 deletions tests/gp/test_gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,84 +25,6 @@
from pymc.math import cartesian


class TestSigmaParams:
    def setup_method(self):
        """Shared fixtures: inputs, inducing points, targets, and a GP model."""
        self.x = np.linspace(-5, 5, 30)[:, None]
        self.xu = np.linspace(-5, 5, 10)[:, None]
        self.y = np.random.normal(0.25 * self.x, 0.1)

        with pm.Model() as self.model:
            kernel = pm.gp.cov.Linear(1, c=0.0)
            intercept = pm.Normal("c", mu=20.0, sigma=100.0)
            self.gp = self.gp_implementation(
                mean_func=pm.gp.mean.Constant(intercept),
                cov_func=kernel,
            )
            self.sigma = pm.HalfNormal("sigma", sigma=100)


class TestMarginalSigmaParams(TestSigmaParams):
    R"""Exercise the FutureWarning and ValueError paths of the sigma/noise API."""

    gp_implementation = pm.gp.Marginal

    def test_catch_warnings(self):
        """Passing the legacy 'noise' argument emits a FutureWarning."""
        with self.model:
            with pytest.warns(FutureWarning):
                self.gp.marginal_likelihood("lik_noise", X=self.x, y=self.y, noise=self.sigma)

            with pytest.warns(FutureWarning):
                self.gp.conditional("cond_noise", Xnew=self.x, given={"noise": self.sigma})

    def test_raise_value_error(self):
        """Supplying both of sigma/noise, or neither, raises ValueError."""
        with self.model:
            with pytest.raises(ValueError):
                self.gp.marginal_likelihood(
                    "like_both", X=self.x, y=self.y, noise=self.sigma, sigma=self.sigma
                )

            with pytest.raises(ValueError):
                self.gp.marginal_likelihood("like_neither", X=self.x, y=self.y)


class TestMarginalApproxSigmaParams(TestSigmaParams):
    R"""Exercise the FutureWarning and ValueError paths for MarginalApprox."""

    gp_implementation = pm.gp.MarginalApprox

    @pytest.mark.xfail(reason="Possible shape problem, see #6366")
    def test_catch_warnings(self):
        """Passing the legacy 'noise' argument emits a FutureWarning."""
        with self.model:
            with pytest.warns(FutureWarning):
                self.gp.marginal_likelihood(
                    "lik_noise", X=self.x, Xu=self.xu, y=self.y, noise=self.sigma
                )

    def test_raise_value_error(self):
        """Supplying both of sigma/noise, or neither, raises ValueError."""
        with self.model:
            with pytest.raises(ValueError):
                self.gp.marginal_likelihood(
                    "like_both",
                    X=self.x,
                    Xu=self.xu,
                    y=self.y,
                    noise=self.sigma,
                    sigma=self.sigma,
                )

            with pytest.raises(ValueError):
                self.gp.marginal_likelihood("like_neither", X=self.x, Xu=self.xu, y=self.y)


class TestMarginalVsMarginalApprox:
R"""
Compare test fits of models Marginal and MarginalApprox.
Expand Down
Loading