
Commit 842a8b8

tests pass, small bugfixes

1 parent 43e4ed4

3 files changed: +98 additions, -23 deletions

pymc3/gp/cov.py

Lines changed: 22 additions & 8 deletions
@@ -4,14 +4,17 @@
 from functools import reduce
 from operator import mul, add
 
-__all__ = ['ExpQuad',
+__all__ = ['Constant',
+           'WhiteNoise',
+           'ExpQuad',
            'RatQuad',
            'Exponential',
            'Matern52',
            'Matern32',
            'Linear',
            'Polynomial',
            'Cosine',
+           'Periodic',
            'WarpedInput',
            'Gibbs']
 

@@ -55,6 +58,12 @@ def __call__(self, X, Xs=None, diag=False):
         else:
             return self.full(X, Xs)
 
+    def diag(self, X):
+        raise NotImplementedError
+
+    def full(self, X, Xs):
+        raise NotImplementedError
+
     def _slice(self, X, Xs):
         X = tt.as_tensor_variable(X[:, self.active_dims])
         if Xs is not None:
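The new diag and full stubs make the kernel interface explicit: every subclass is expected to override both. A minimal hypothetical subclass (not a kernel from the library, just a sketch of the contract the stubs document):

    import theano.tensor as tt

    class OnesCov(Covariance):
        # Hypothetical kernel that is 1 everywhere, shown only to
        # illustrate the diag/full contract of the base class.
        def diag(self, X):
            # shape (n,): the diagonal of the Gram matrix
            return tt.alloc(1.0, X.shape[0])

        def full(self, X, Xs=None):
            # shape (n, n) or (n, m): the full Gram matrix
            if Xs is None:
                return tt.alloc(1.0, X.shape[0], X.shape[0])
            return tt.alloc(1.0, X.shape[0], Xs.shape[0])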
@@ -172,13 +181,13 @@ def __init__(self, sigma):
         self.sigma = sigma
 
     def diag(self, X):
-        return tt.square(self.sigma) * tt.ones(tt.stack([X.shape[0], ]))
+        return tt.alloc(tt.square(self.sigma), X.shape[0])
 
     def full(self, X, Xs=None):
         if Xs is None:
             return tt.square(self.sigma) * tt.eye(X.shape[0])
         else:
-            return tt.zeros(tt.stack([X.shape[0], Xs.shape[0]]))
+            return tt.alloc(0.0, X.shape[0], Xs.shape[0])
 
 
 class Stationary(Covariance):
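tt.alloc(value, *shape) broadcasts a scalar to a tensor of the given (possibly symbolic) shape, which is more direct than the older tt.ones(tt.stack([...])) idiom. A quick standalone check of the two expressions used above:

    import numpy as np
    import theano
    import theano.tensor as tt

    X = tt.dmatrix('X')
    sigma = 0.5
    # diagonal of a WhiteNoise Gram matrix: a length-n vector of sigma**2
    diag = tt.alloc(tt.square(sigma), X.shape[0])
    # cross-covariance of white noise between distinct inputs: all zeros
    zeros = tt.alloc(0.0, X.shape[0], X.shape[0])

    f = theano.function([X], [diag, zeros])
    d, z = f(np.zeros((3, 2)))
    print(d)        # [0.25 0.25 0.25]
    print(z.shape)  # (3, 3)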
@@ -187,9 +196,9 @@ class Stationary(Covariance):
 
     Parameters
     ----------
-    ls : If input_dim > 1, a list or array of scalars or PyMC3 random
+    ls : Lengthscale. If input_dim > 1, a list or array of scalars or PyMC3 random
         variables. If input_dim == 1, a scalar or PyMC3 random variable.
-    ls_inv : 1 / ls. One of ls or ls_inv must be provided.
+    ls_inv : Inverse lengthscale. 1 / ls. One of ls or ls_inv must be provided.
     """
 
     def __init__(self, input_dim, ls=None, ls_inv=None, active_dims=None):
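With the clarified docstring, the equivalent ways of specifying the lengthscale look like this (a usage sketch; the values are arbitrary, mirroring the tests added in this commit):

    import pymc3 as pm

    # one-dimensional input: a scalar lengthscale, given directly...
    cov_a = pm.gp.cov.ExpQuad(1, ls=0.1)
    # ...or as its inverse; these parameterize the same kernel
    cov_b = pm.gp.cov.ExpQuad(1, ls_inv=10.0)
    # multi-dimensional input: one lengthscale per dimension (ARD)
    cov_c = pm.gp.cov.ExpQuad(3, ls=[0.1, 0.2, 0.3])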
@@ -222,13 +231,19 @@ def euclidean_dist(self, X, Xs):
 
     def diag(self, X):
         return tt.alloc(1.0, X.shape[0])
-        #return tt.ones(tt.stack([X.shape[0], ]))
 
     def full(self, X, Xs=None):
         raise NotImplementedError
 
 
 class Periodic(Stationary):
+    R"""
+    The Periodic kernel.
+
+    .. math::
+       k(x, x') = \mathrm{exp}\left( -\frac{2 \mathrm{sin}^2(\pi |x-x'| \frac{1}{T})}{\ell^2} \right)
+    """
+
     def __init__(self, input_dim, period, ls=None, ls_inv=None, active_dims=None):
         super(Periodic, self).__init__(input_dim, ls, ls_inv, active_dims)
         self.period = period
@@ -490,8 +505,7 @@ def full(self, X, Xs=None):
                 * tt.exp(-1.0 * r2 / (rx2 + rz2)))
 
     def diag(self, X):
-        return tt.ones(tt.stack([X.shape[0], ]))
-
+        return tt.alloc(1.0, X.shape[0])
 
 def handle_args(func, args):
     def f(x, args):

pymc3/gp/gp.py

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ def __init__(self, mean_func=Zero(), cov_func=Constant(0.0)):
 
     def __add__(self, other):
         same_attrs = set(self.__dict__.keys()) == set(other.__dict__.keys())
-        if not isinstance(self, type(other)) and not same_attrs:
+        if not isinstance(self, type(other)) or not same_attrs:
            raise ValueError("cant add different GP types")
         mean_total = self.mean_func + other.mean_func
         cov_total = self.cov_func + other.cov_func
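This one-word fix matters because the guard should raise when either condition fails; with `and` it raised only when both failed, so adding two GPs of different types that happened to share attribute names slipped through. A toy truth-table check (standalone, not the library code):

    # same_type=False, same_attrs=True: different GP classes with
    # identical attribute names, the case the old guard missed
    same_type, same_attrs = False, True

    old_raises = not same_type and not same_attrs  # False: bug, no error
    new_raises = not same_type or not same_attrs   # True: correctly raises
    print(old_raises, new_raises)  # False True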

pymc3/tests/test_gp.py

Lines changed: 75 additions & 14 deletions
@@ -312,6 +312,52 @@ def test_2dard(self):
         Kd = theano.function([], cov(X, diag=True))()
         npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
 
+    def test_inv_lengthscale(self):
+        X = np.linspace(0, 1, 10)[:, None]
+        with pm.Model() as model:
+            cov = pm.gp.cov.ExpQuad(1, ls_inv=10)
+        K = theano.function([], cov(X))()
+        npt.assert_allclose(K[0, 1], 0.53940, atol=1e-3)
+        K = theano.function([], cov(X, X))()
+        npt.assert_allclose(K[0, 1], 0.53940, atol=1e-3)
+        # check diagonal
+        Kd = theano.function([], cov(X, diag=True))()
+        npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
+
+
+class TestWhiteNoise(object):
+    def test_1d(self):
+        X = np.linspace(0, 1, 10)[:, None]
+        with pm.Model() as model:
+            cov = pm.gp.cov.WhiteNoise(sigma=0.5)
+        K = theano.function([], cov(X))()
+        npt.assert_allclose(K[0, 1], 0.0, atol=1e-3)
+        npt.assert_allclose(K[0, 0], 0.5**2, atol=1e-3)
+        # check diagonal
+        Kd = theano.function([], cov(X, diag=True))()
+        npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
+        # check predict
+        K = theano.function([], cov(X, X))()
+        npt.assert_allclose(K[0, 1], 0.0, atol=1e-3)
+        # white noise predicting should return all zeros
+        npt.assert_allclose(K[0, 0], 0.0, atol=1e-3)
+
+
+class TestConstant(object):
+    def test_1d(self):
+        X = np.linspace(0, 1, 10)[:, None]
+        with pm.Model() as model:
+            cov = pm.gp.cov.Constant(2.5)
+        K = theano.function([], cov(X))()
+        npt.assert_allclose(K[0, 1], 2.5, atol=1e-3)
+        npt.assert_allclose(K[0, 0], 2.5, atol=1e-3)
+        K = theano.function([], cov(X, X))()
+        npt.assert_allclose(K[0, 1], 2.5, atol=1e-3)
+        npt.assert_allclose(K[0, 0], 2.5, atol=1e-3)
+        # check diagonal
+        Kd = theano.function([], cov(X, diag=True))()
+        npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
+
 
 class TestRatQuad(object):
     def test_1d(self):
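The expected value 0.53940 in test_inv_lengthscale can be checked by hand, assuming the standard squared-exponential form k(x, x') = exp(-d**2 / (2 * ls**2)) with ls = 1 / ls_inv = 0.1:

    import numpy as np

    d = 1.0 / 9.0   # spacing between the first two points of linspace(0, 1, 10)
    ls_inv = 10.0
    k01 = np.exp(-0.5 * (d * ls_inv) ** 2)
    print(k01)      # ~0.5394, within the test's atol=1e-3 of 0.53940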
@@ -383,6 +429,20 @@ def test_1d(self):
         npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
 
 
+class TestPeriodic(object):
+    def test_1d(self):
+        X = np.linspace(0, 1, 10)[:, None]
+        with pm.Model() as model:
+            cov = pm.gp.cov.Periodic(1, 0.1, 0.1)
+        K = theano.function([], cov(X))()
+        npt.assert_allclose(K[0, 1], 0.00288, atol=1e-3)
+        K = theano.function([], cov(X, X))()
+        npt.assert_allclose(K[0, 1], 0.00288, atol=1e-3)
+        # check diagonal
+        Kd = theano.function([], cov(X, diag=True))()
+        npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
+
+
 class TestLinear(object):
     def test_1d(self):
         X = np.linspace(0, 1, 10)[:, None]
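The expected off-diagonal value 0.00288 in TestPeriodic is reproduced by k = exp(-sin^2(pi * d / T) / (2 * ell^2)) with d = 1/9, period T = 0.1, and ell = 0.1. This scaling is an assumption about the implementation; note it differs by a constant factor in the exponent from the docstring formula added in cov.py above, and it is the one that matches the asserted number:

    import numpy as np

    d = 1.0 / 9.0        # spacing of np.linspace(0, 1, 10)
    T, ell = 0.1, 0.1    # period and lengthscale from the test
    k01 = np.exp(-0.5 * np.sin(np.pi * d / T) ** 2 / ell ** 2)
    print(round(k01, 5))  # 0.00288, the value asserted in the test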
@@ -491,7 +551,7 @@ def setup_method(self):
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
             gp = pm.gp.Marginal(mean_func, cov_func)
-            f = gp.marginal_likelihood("f", X, y, noise=0.0)
+            f = gp.marginal_likelihood("f", X, y, noise=0.0, is_observed=False, observed=y)
             p = gp.conditional("p", Xnew)
         self.logp = model.logp({"p": pnew})
         self.X = X
@@ -558,27 +618,28 @@ def testApproximations(self, approx):
             approx_logp = model.logp({"f": self.y, "p": self.pnew})
         npt.assert_allclose(approx_logp, self.logp, atol=0, rtol=1e-2)
 
-    def testPredictCov(self):
+    @pytest.mark.parametrize('approx', ['FITC', 'VFE', 'DTC'])
+    def testPredictVar(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx="DTC")
+            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
-        mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
-        mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
+        mu1, var1 = self.gp.predict(self.Xnew, diag=True)
+        mu2, var2 = gp.predict(self.Xnew, diag=True)
         npt.assert_allclose(mu1, mu2, atol=0, rtol=1e-3)
-        npt.assert_allclose(cov1, cov2, atol=0, rtol=1e-3)
+        npt.assert_allclose(var1, var2, atol=0, rtol=1e-3)
 
-    def testPredictVar(self):
+    def testPredictCov(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
             gp = pm.gp.MarginalSparse(mean_func, cov_func, approx="DTC")
-            f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
-        mu1, var1 = self.gp.predict(self.Xnew, diag=True)
-        mu2, var2 = gp.predict(self.Xnew, diag=True)
+            f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma, is_observed=False)
+        mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
+        mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
         npt.assert_allclose(mu1, mu2, atol=0, rtol=1e-3)
-        npt.assert_allclose(var1, var2, atol=0, rtol=1e-3)
+        npt.assert_allclose(cov1, cov2, atol=0, rtol=1e-3)
 
 
 class TestGPAdditive(object):
@@ -621,7 +682,7 @@ def testAdditiveMarginal(self):
 
     @pytest.mark.parametrize('approx', ['FITC', 'VFE', 'DTC'])
     def testAdditiveMarginalSparse(self, approx):
-        Xu = np.random.randn(10, 1)
+        Xu = np.random.randn(10, 3)
         sigma = 0.1
         with pm.Model() as model1:
             gp1 = pm.gp.MarginalSparse(self.means[0], self.covs[0], approx=approx)
@@ -669,8 +730,8 @@ def testAdditiveLatent(self):
             fp2 = gptot.conditional("fp2", self.Xnew)
 
         fp = np.random.randn(self.Xnew.shape[0])
-        npt.assert_allclose(fp1.logp({"fp1": fp}), fp2.logp({"fp2": fp}), atol=0, rtol=1e-2)
-
+        npt.assert_allclose(fp1.logp({"fsum": self.y, "fp1": fp}),
+                            fp2.logp({"fsum": self.y, "fp2": fp}), atol=0, rtol=1e-2)
 
     def testAdditiveSparseRaises(self):
         # cant add different approximations
