@@ -12,6 +12,7 @@
     HeteroskedasticSingleTaskGP,
     SingleTaskGP,
 )
+from botorch.models.utils import add_output_dim
 from botorch.posteriors import GPyTorchPosterior
 from botorch.sampling import SobolQMCNormalSampler
 from gpytorch.kernels import MaternKernel, ScaleKernel
@@ -82,20 +83,39 @@ def test_gp(self, cuda=False):
         # test posterior
         # test non batch evaluation
         X = torch.rand(batch_shape + torch.Size([3, 1]), **tkwargs)
+        expected_mean_shape = batch_shape + torch.Size([3, num_outputs])
         posterior = model.posterior(X)
         self.assertIsInstance(posterior, GPyTorchPosterior)
-        self.assertEqual(
-            posterior.mean.shape, batch_shape + torch.Size([3, num_outputs])
+        self.assertEqual(posterior.mean.shape, expected_mean_shape)
+        # test adding observation noise
+        posterior_pred = model.posterior(X, observation_noise=True)
+        self.assertIsInstance(posterior_pred, GPyTorchPosterior)
+        self.assertEqual(posterior_pred.mean.shape, expected_mean_shape)
+        pvar = posterior_pred.variance
+        pvar_exp = _get_pvar_expected(posterior, model, X, num_outputs)
+        self.assertTrue(
+            torch.allclose(pvar, pvar_exp, rtol=1e-4, atol=1e-06)
         )
+
         # test batch evaluation
         X = torch.rand(
             torch.Size([2]) + batch_shape + torch.Size([3, 1]), **tkwargs
         )
+        expected_mean_shape = (
+            torch.Size([2]) + batch_shape + torch.Size([3, num_outputs])
+        )
+
         posterior = model.posterior(X)
         self.assertIsInstance(posterior, GPyTorchPosterior)
-        self.assertEqual(
-            posterior.mean.shape,
-            torch.Size([2]) + batch_shape + torch.Size([3, num_outputs]),
+        self.assertEqual(posterior.mean.shape, expected_mean_shape)
+        # test adding observation noise in batch mode
+        posterior_pred = model.posterior(X, observation_noise=True)
+        self.assertIsInstance(posterior_pred, GPyTorchPosterior)
+        self.assertEqual(posterior_pred.mean.shape, expected_mean_shape)
+        pvar = posterior_pred.variance
+        pvar_exp = _get_pvar_expected(posterior, model, X, num_outputs)
+        self.assertTrue(
+            torch.allclose(pvar, pvar_exp, rtol=1e-4, atol=1e-06)
         )

     def test_gp_cuda(self):
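
Not part of the diff: a minimal sketch of the behavior the new assertions exercise, for a single-output `SingleTaskGP` with default (untrained) hyperparameters. The toy data and shapes here are illustrative only.

```python
import torch
from botorch.models import SingleTaskGP

# Toy single-output data: 8 points in [0, 1].
train_X = torch.rand(8, 1, dtype=torch.double)
train_Y = torch.sin(6.28 * train_X)

model = SingleTaskGP(train_X, train_Y)

X = torch.rand(3, 1, dtype=torch.double)
posterior = model.posterior(X)  # posterior over the latent function f
posterior_pred = model.posterior(X, observation_noise=True)  # f + noise

# Adding observation noise leaves the mean unchanged but inflates the
# variance by the likelihood's noise level at every test point.
assert torch.allclose(posterior_pred.mean, posterior.mean)
assert (posterior_pred.variance > posterior.variance).all()
```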
@@ -324,3 +344,13 @@ def test_condition_on_observations(self, cuda=False):
     def test_fantasize(self, cuda=False):
         with self.assertRaises(NotImplementedError):
             super().test_fantasize(cuda=cuda)
+
+
+def _get_pvar_expected(posterior, model, X, num_outputs):
+    if num_outputs == 1:
+        return model.likelihood(posterior.mvn, X).variance.unsqueeze(-1)
+    X_, odi = add_output_dim(X=X, original_batch_shape=model._input_batch_shape)
+    pvar_exp = model.likelihood(model(X_), X_).variance
+    return torch.stack(
+        [pvar_exp.select(dim=odi, index=i) for i in range(num_outputs)], dim=-1
+    )
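
For the multi-output branch, here is a rough usage sketch of the same pattern the new helper relies on (again not part of the diff; a two-output `SingleTaskGP` on toy data, hyperparameters at defaults). `add_output_dim` inserts a broadcast dimension so the test `X` lines up with the model's internal output batch dimension, and the returned `odi` is that dimension's index:

```python
import torch
from botorch.models import SingleTaskGP
from botorch.models.utils import add_output_dim

train_X = torch.rand(8, 1, dtype=torch.double)
# Two outputs -> BoTorch represents the model as a batch of two GPs.
train_Y = torch.cat([torch.sin(6 * train_X), torch.cos(6 * train_X)], dim=-1)
model = SingleTaskGP(train_X, train_Y)
model.eval()

X = torch.rand(3, 1, dtype=torch.double)
X_, odi = add_output_dim(X=X, original_batch_shape=model._input_batch_shape)
# Pushing X_ through model + likelihood gives noisy predictive variances,
# batched over the output dimension at index odi.
pvar = model.likelihood(model(X_), X_).variance
pvar_per_output = torch.stack(
    [pvar.select(dim=odi, index=i) for i in range(model.num_outputs)], dim=-1
)
print(pvar_per_output.shape)  # torch.Size([3, 2])
```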