1
1
# pylint:disable=unused-variable
2
- from .helpers import SeededTest
3
2
from functools import reduce
4
3
from operator import add
5
4
import pymc3 as pm
9
8
import numpy .testing as npt
10
9
import pytest
11
10
11
+ np .random .seed (101 )
12
12
13
13
class TestZeroMean (object ):
14
14
def test_value (self ):
@@ -483,12 +483,12 @@ class TestMarginalVsLatent(object):
483
483
Compare the logp of models Marginal, noise=0 and Latent.
484
484
"""
485
485
def setup_method (self ):
486
- X = np .random .randn (20 ,3 )
487
- y = np .random .randn (20 )
488
- Xnew = np .random .randn (200 , 3 )
489
- pnew = np .random .randn (200 )
486
+ X = np .random .randn (50 ,3 )
487
+ y = np .random .randn (50 ) * 0.01
488
+ Xnew = np .random .randn (60 , 3 )
489
+ pnew = np .random .randn (60 ) * 0.01
490
490
with pm .Model () as model :
491
- cov_func = pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ])
491
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ])
492
492
mean_func = pm .gp .mean .Constant (0.5 )
493
493
gp = pm .gp .Marginal (mean_func , cov_func )
494
494
f = gp .marginal_likelihood ("f" , X , y , noise = 0.0 )
@@ -501,25 +501,25 @@ def setup_method(self):
501
501
502
502
def testLatent1 (self ):
503
503
with pm .Model () as model :
504
- cov_func = pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ])
504
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ])
505
505
mean_func = pm .gp .mean .Constant (0.5 )
506
506
gp = pm .gp .Latent (mean_func , cov_func )
507
507
f = gp .prior ("f" , self .X , reparameterize = False )
508
508
p = gp .conditional ("p" , self .Xnew )
509
509
latent_logp = model .logp ({"f" : self .y , "p" : self .pnew })
510
- npt .assert_allclose (latent_logp , self .logp , atol = 0 , rtol = 1e-3 )
510
+ npt .assert_allclose (latent_logp , self .logp , atol = 0 , rtol = 1e-2 )
511
511
512
512
def testLatent2 (self ):
513
513
with pm .Model () as model :
514
- cov_func = pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ])
514
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ])
515
515
mean_func = pm .gp .mean .Constant (0.5 )
516
516
gp = pm .gp .Latent (mean_func , cov_func )
517
517
f = gp .prior ("f" , self .X , reparameterize = True )
518
518
p = gp .conditional ("p" , self .Xnew )
519
519
chol = np .linalg .cholesky (cov_func (self .X ).eval ())
520
520
y_rotated = np .linalg .solve (chol , self .y - 0.5 )
521
521
latent_logp = model .logp ({"f_rotated_" : y_rotated , "p" : self .pnew })
522
- npt .assert_allclose (latent_logp , self .logp , atol = 0 , rtol = 1e-3 )
522
+ npt .assert_allclose (latent_logp , self .logp , atol = 0 , rtol = 1e-2 )
523
523
524
524
525
525
class TestMarginalVsMarginalSparse (object ):
@@ -528,12 +528,12 @@ class TestMarginalVsMarginalSparse(object):
528
528
Should be nearly equal when inducing points are same as inputs.
529
529
"""
530
530
def setup_method (self ):
531
- X = np .random .randn (20 ,3 )
532
- y = np .random .randn (20 )
533
- Xnew = np .random .randn (200 , 3 )
534
- pnew = np .random .randn (200 )
531
+ X = np .random .randn (50 ,3 )
532
+ y = np .random .randn (50 ) * 0.01
533
+ Xnew = np .random .randn (60 , 3 )
534
+ pnew = np .random .randn (60 ) * 0.01
535
535
with pm .Model () as model :
536
- cov_func = pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ])
536
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ])
537
537
mean_func = pm .gp .mean .Constant (0.5 )
538
538
gp = pm .gp .Marginal (mean_func , cov_func )
539
539
sigma = 0.1
@@ -545,33 +545,56 @@ def setup_method(self):
545
545
self .y = y
546
546
self .sigma = sigma
547
547
self .pnew = pnew
548
+ self .gp = gp
548
549
549
550
@pytest .mark .parametrize ('approx' , ['FITC' , 'VFE' , 'DTC' ])
550
551
def testApproximations (self , approx ):
551
552
with pm .Model () as model :
552
- cov_func = pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ])
553
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ])
553
554
mean_func = pm .gp .mean .Constant (0.5 )
554
555
gp = pm .gp .MarginalSparse (mean_func , cov_func , approx = approx )
555
556
f = gp .marginal_likelihood ("f" , self .X , self .X , self .y , self .sigma )
556
557
p = gp .conditional ("p" , self .Xnew )
557
558
approx_logp = model .logp ({"f" : self .y , "p" : self .pnew })
558
- npt .assert_allclose (approx_logp , self .logp , atol = 0 , rtol = 1e-3 )
559
+ npt .assert_allclose (approx_logp , self .logp , atol = 0 , rtol = 1e-2 )
560
+
561
+ def testPredictCov (self ):
562
+ with pm .Model () as model :
563
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0.3 ])
564
+ mean_func = pm .gp .mean .Constant (0.5 )
565
+ gp = pm .gp .MarginalSparse (mean_func , cov_func , approx = "DTC" )
566
+ f = gp .marginal_likelihood ("f" , self .X , self .X , self .y , self .sigma )
567
+ mu1 , cov1 = self .gp .predict (self .Xnew , pred_noise = True )
568
+ mu2 , cov2 = gp .predict (self .Xnew , pred_noise = True )
569
+ npt .assert_allclose (mu1 , mu2 , atol = 0 , rtol = 1e-3 )
570
+ npt .assert_allclose (cov1 , cov2 , atol = 0 , rtol = 1e-3 )
571
+
572
+ def testPredictVar (self ):
573
+ with pm .Model () as model :
574
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0.3 ])
575
+ mean_func = pm .gp .mean .Constant (0.5 )
576
+ gp = pm .gp .MarginalSparse (mean_func , cov_func , approx = "DTC" )
577
+ f = gp .marginal_likelihood ("f" , self .X , self .X , self .y , self .sigma )
578
+ mu1 , var1 = self .gp .predict (self .Xnew , diag = True )
579
+ mu2 , var2 = gp .predict (self .Xnew , diag = True )
580
+ npt .assert_allclose (mu1 , mu2 , atol = 0 , rtol = 1e-3 )
581
+ npt .assert_allclose (var1 , var2 , atol = 0 , rtol = 1e-3 )
559
582
560
583
561
584
class TestGPAdditive (object ):
562
585
def setup_method (self ):
563
- self .X = np .random .randn (20 ,3 )
564
- self .y = np .random .randn (20 )
565
- self .Xnew = np .random .randn (200 , 3 )
586
+ self .X = np .random .randn (50 ,3 )
587
+ self .y = np .random .randn (50 ) * 0.01
588
+ self .Xnew = np .random .randn (60 , 3 )
566
589
self .noise = pm .gp .cov .WhiteNoise (0.1 )
567
- self .covs = (pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ]),
568
- pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ]),
569
- pm .gp .cov .ExpQuad (3 , [1 , 2 , 3 ]))
590
+ self .covs = (pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ]),
591
+ pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ]),
592
+ pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0. 3 ]))
570
593
self .means = (pm .gp .mean .Constant (0.5 ),
571
594
pm .gp .mean .Constant (0.5 ),
572
595
pm .gp .mean .Constant (0.5 ))
573
596
574
- def testAddMarginal (self ):
597
+ def testAdditiveMarginal (self ):
575
598
with pm .Model () as model1 :
576
599
gp1 = pm .gp .Marginal (self .means [0 ], self .covs [0 ])
577
600
gp2 = pm .gp .Marginal (self .means [1 ], self .covs [1 ])
@@ -585,7 +608,7 @@ def testAddMarginal(self):
585
608
gptot = pm .gp .Marginal (reduce (add , self .means ), reduce (add , self .covs ))
586
609
fsum = gptot .marginal_likelihood ("f" , self .X , self .y , noise = self .noise )
587
610
model2_logp = model2 .logp ({"fsum" : self .y })
588
- npt .assert_allclose (model1_logp , model2_logp , atol = 0 , rtol = 1e-3 )
611
+ npt .assert_allclose (model1_logp , model2_logp , atol = 0 , rtol = 1e-2 )
589
612
590
613
with model1 :
591
614
fp1 = gpsum .conditional ("fp1" , self .Xnew , given = {"X" : self .X , "y" : self .y ,
@@ -594,13 +617,145 @@ def testAddMarginal(self):
594
617
fp2 = gptot .conditional ("fp2" , self .Xnew )
595
618
596
619
fp = np .random .randn (self .Xnew .shape [0 ])
597
- npt .assert_allclose (fp1 .logp ({"fp1" : fp }), fp2 .logp ({"fp2" : fp }), atol = 0 , rtol = 1e-3 )
620
+ npt .assert_allclose (fp1 .logp ({"fp1" : fp }), fp2 .logp ({"fp2" : fp }), atol = 0 , rtol = 1e-2 )
621
+
622
+ @pytest .mark .parametrize ('approx' , ['FITC' , 'VFE' , 'DTC' ])
623
+ def testAdditiveMarginalSparse (self , approx ):
624
+ Xu = np .random .randn (10 , 1 )
625
+ sigma = 0.1
626
+ with pm .Model () as model1 :
627
+ gp1 = pm .gp .MarginalSparse (self .means [0 ], self .covs [0 ], approx = approx )
628
+ gp2 = pm .gp .MarginalSparse (self .means [1 ], self .covs [1 ], approx = approx )
629
+ gp3 = pm .gp .MarginalSparse (self .means [2 ], self .covs [2 ], approx = approx )
630
+
631
+ gpsum = gp1 + gp2 + gp3
632
+ fsum = gpsum .marginal_likelihood ("f" , self .X , Xu , self .y , sigma = sigma )
633
+ model1_logp = model1 .logp ({"fsum" : self .y })
634
+
635
+ with pm .Model () as model2 :
636
+ gptot = pm .gp .MarginalSparse (reduce (add , self .means ), reduce (add , self .covs ), approx = approx )
637
+ fsum = gptot .marginal_likelihood ("f" , self .X , Xu , self .y , sigma = sigma )
638
+ model2_logp = model2 .logp ({"fsum" : self .y })
639
+ npt .assert_allclose (model1_logp , model2_logp , atol = 0 , rtol = 1e-2 )
640
+
641
+ with model1 :
642
+ fp1 = gpsum .conditional ("fp1" , self .Xnew , given = {"X" : self .X , "Xu" : Xu , "y" : self .y ,
643
+ "sigma" : sigma , "gp" : gpsum })
644
+ with model2 :
645
+ fp2 = gptot .conditional ("fp2" , self .Xnew )
646
+
647
+ fp = np .random .randn (self .Xnew .shape [0 ])
648
+ npt .assert_allclose (fp1 .logp ({"fp1" : fp }), fp2 .logp ({"fp2" : fp }), atol = 0 , rtol = 1e-2 )
649
+
650
+ def testAdditiveLatent (self ):
651
+ with pm .Model () as model1 :
652
+ gp1 = pm .gp .Latent (self .means [0 ], self .covs [0 ])
653
+ gp2 = pm .gp .Latent (self .means [1 ], self .covs [1 ])
654
+ gp3 = pm .gp .Latent (self .means [2 ], self .covs [2 ])
655
+
656
+ gpsum = gp1 + gp2 + gp3
657
+ fsum = gpsum .prior ("fsum" , self .X , reparameterize = False )
658
+ model1_logp = model1 .logp ({"fsum" : self .y })
659
+
660
+ with pm .Model () as model2 :
661
+ gptot = pm .gp .Latent (reduce (add , self .means ), reduce (add , self .covs ))
662
+ fsum = gptot .prior ("fsum" , self .X , reparameterize = False )
663
+ model2_logp = model2 .logp ({"fsum" : self .y })
664
+ npt .assert_allclose (model1_logp , model2_logp , atol = 0 , rtol = 1e-2 )
665
+
666
+ with model1 :
667
+ fp1 = gpsum .conditional ("fp1" , self .Xnew , given = {"X" : self .X , "f" : self .y , "gp" : gpsum })
668
+ with model2 :
669
+ fp2 = gptot .conditional ("fp2" , self .Xnew )
670
+
671
+ fp = np .random .randn (self .Xnew .shape [0 ])
672
+ npt .assert_allclose (fp1 .logp ({"fp1" : fp }), fp2 .logp ({"fp2" : fp }), atol = 0 , rtol = 1e-2 )
673
+
674
+
675
+ def testAdditiveSparseRaises (self ):
676
+ # cant add different approximations
677
+ with pm .Model () as model :
678
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0.3 ])
679
+ gp1 = pm .gp .MarginalSparse (cov_func = cov_func , approx = "DTC" )
680
+ gp2 = pm .gp .MarginalSparse (cov_func = cov_func , approx = "FITC" )
681
+ with pytest .raises (Exception ) as e_info :
682
+ gp1 + gp2
683
+
684
+ def testAdditiveTypeRaises1 (self ):
685
+ with pm .Model () as model :
686
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0.3 ])
687
+ gp1 = pm .gp .MarginalSparse (cov_func = cov_func , approx = "DTC" )
688
+ gp2 = pm .gp .Marginal (cov_func = cov_func )
689
+ with pytest .raises (Exception ) as e_info :
690
+ gp1 + gp2
691
+
692
+ def testAdditiveTypeRaises2 (self ):
693
+ with pm .Model () as model :
694
+ cov_func = pm .gp .cov .ExpQuad (3 , [0.1 , 0.2 , 0.3 ])
695
+ gp1 = pm .gp .Latent (cov_func = cov_func )
696
+ gp2 = pm .gp .Marginal (cov_func = cov_func )
697
+ with pytest .raises (Exception ) as e_info :
698
+ gp1 + gp2
598
699
599
700
600
701
class TestTP(object):
    """Compare a Student's-T process with very high degrees of freedom to a GP.

    With nu large, TP logp values should be close to the Latent GP reference
    computed in setup_method.
    """
    def setup_method(self):
        X = np.random.randn(20, 3)
        y = np.random.randn(20) * 0.01
        Xnew = np.random.randn(50, 3)
        pnew = np.random.randn(50) * 0.01
        with pm.Model() as model:
            kernel = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
            gp = pm.gp.Latent(cov_func=kernel)
            f = gp.prior("f", X, reparameterize=False)
            p = gp.conditional("p", Xnew)
        self.X = X
        self.y = y
        self.Xnew = Xnew
        self.pnew = pnew
        # Reference values: full-model logp and the conditional's own logp.
        self.latent_logp = model.logp({"f": y, "p": pnew})
        self.plogp = p.logp({"f": y, "p": pnew})

    def testTPvsLatent(self):
        """TP with nu=10000 should reproduce the Latent GP model logp."""
        with pm.Model() as model:
            kernel = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
            tp = pm.gp.TP(cov_func=kernel, nu=10000)
            f = tp.prior("f", self.X, reparameterize=False)
            p = tp.conditional("p", self.Xnew)
        tp_logp = model.logp({"f": self.y, "p": self.pnew})
        npt.assert_allclose(self.latent_logp, tp_logp, atol=0, rtol=1e-2)

    def testTPvsLatentReparameterized(self):
        """Reparameterized TP conditional logp should match the GP reference.

        Only the conditional's logp is compared: the full model logp is
        unreliable here because the reparameterization introduces a
        chi2__log__ variable.
        """
        with pm.Model() as model:
            kernel = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
            tp = pm.gp.TP(cov_func=kernel, nu=10000)
            f = tp.prior("f", self.X, reparameterize=True)
            p = tp.conditional("p", self.Xnew)
        chol = np.linalg.cholesky(kernel(self.X).eval())
        y_rotated = np.linalg.solve(chol, self.y)
        plogp = p.logp({"f_rotated_": y_rotated, "p": self.pnew,
                        "chi2__log__": np.log(1e20)})
        npt.assert_allclose(self.plogp, plogp, atol=0, rtol=1e-2)

    def testAdditiveTPRaises(self):
        """Adding two TP objects is unsupported and must raise."""
        with pm.Model() as model:
            kernel = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
            tp1 = pm.gp.TP(cov_func=kernel, nu=10)
            tp2 = pm.gp.TP(cov_func=kernel, nu=10)
            with pytest.raises(Exception):
                tp1 + tp2
750
+
751
+
752
+
753
+
754
+
755
+
756
+
757
+
758
+
759
+
760
+
606
761
0 commit comments