 import torch
 from botorch.models.converter import batched_to_model_list
 from botorch.models.deterministic import DeterministicModel
-from botorch.models.gp_regression import SingleTaskGP
+from botorch.models.gp_regression import FixedNoiseGP, SingleTaskGP
 from botorch.models.model import ModelList
 from botorch.models.multitask import MultiTaskGP
 from botorch.models.transforms.input import Normalize
@@ -644,3 +644,20 @@ def test_get_gp_samples(self):
             expected = torch.Size([13, 5, 3, m])
             Y_batched = gp_samples.posterior(test_X).mean
             self.assertEqual(Y_batched.shape, expected)
+
+    def test_with_fixed_noise(self):
+        for n_samples in (1, 20):
+            gp_samples = get_gp_samples(
+                model=FixedNoiseGP(
+                    torch.rand(5, 3, dtype=torch.double),
+                    torch.randn(5, 1, dtype=torch.double),
+                    torch.rand(5, 1, dtype=torch.double) * 0.1,
+                ),
+                num_outputs=1,
+                n_samples=n_samples,
+            )
+            samples = gp_samples(torch.rand(2, 3))
+            expected_shape = (
+                torch.Size([2, 1]) if n_samples == 1 else torch.Size([n_samples, 2, 1])
+            )
+            self.assertEqual(samples.shape, expected_shape)
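As a standalone illustration of the call pattern the new test exercises, the sketch below draws approximate posterior function samples from a FixedNoiseGP via get_gp_samples. The import path botorch.utils.gp_sampling and the random placeholder training data are assumptions for illustration, not part of this diff.

# Minimal usage sketch (assumption: get_gp_samples is imported from
# botorch.utils.gp_sampling; training data is random placeholder data
# mirroring the new test).
import torch

from botorch.models.gp_regression import FixedNoiseGP
from botorch.utils.gp_sampling import get_gp_samples

train_X = torch.rand(5, 3, dtype=torch.double)
train_Y = torch.randn(5, 1, dtype=torch.double)
train_Yvar = torch.rand(5, 1, dtype=torch.double) * 0.1  # fixed observation noise

model = FixedNoiseGP(train_X, train_Y, train_Yvar)

# get_gp_samples returns a deterministic model; calling it on test points
# evaluates the drawn function samples at those points.
gp_samples = get_gp_samples(model=model, num_outputs=1, n_samples=20)
samples = gp_samples(torch.rand(2, 3, dtype=torch.double))
# Per the new test, samples.shape is expected to be torch.Size([20, 2, 1]).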