Skip to content

Commit a0c8337

Browse files
Ethan Che authored and facebook-github-bot committed
Remove outcome_transform=Standardize for SingleTaskGP [botorch tests] (#2965)
Summary: Pull Request resolved: #2965 SingleTaskGP standardizes outcomes by default; updated botorch tests to remove redundant standardizations Reviewed By: saitcakmak Differential Revision: D80032173 fbshipit-source-id: f1fdffb2a3e403386a545f964d9a96325adc4f49
1 parent 20f1116 commit a0c8337

File tree

9 files changed

+6
-27
lines changed

9 files changed

+6
-27
lines changed

botorch/cross_validation.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -154,17 +154,13 @@ def batch_cross_validation(
154154
>>> train_Y = torch.rand_like(train_X)
155155
>>> cv_folds = gen_loo_cv_folds(train_X, train_Y)
156156
>>> input_transform = Normalize(d=train_X.shape[-1])
157-
>>> outcome_transform = Standardize(
158-
... m=train_Y.shape[-1], batch_shape=cv_folds.train_Y.shape[:-2]
159-
... )
160157
>>>
161158
>>> cv_results = batch_cross_validation(
162159
... model_cls=SingleTaskGP,
163160
... mll_cls=ExactMarginalLogLikelihood,
164161
... cv_folds=cv_folds,
165162
... model_init_kwargs={
166163
... "input_transform": input_transform,
167-
... "outcome_transform": outcome_transform,
168164
... },
169165
... )
170166
"""

test/acquisition/multi_objective/test_monte_carlo.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,6 @@
4141
)
4242
from botorch.models.gp_regression import SingleTaskGP
4343
from botorch.models.transforms.input import InputPerturbation
44-
from botorch.models.transforms.outcome import Standardize
4544
from botorch.posteriors.posterior_list import PosteriorList
4645
from botorch.posteriors.transformed import TransformedPosterior
4746
from botorch.sampling.list_sampler import ListSampler
@@ -1718,9 +1717,7 @@ def test_cache_root_w_standardize(self):
17181717
with self.subTest(acqf_class.__name__):
17191718
train_x = torch.rand(3, 2, dtype=torch.float64)
17201719
train_y = torch.randn(3, 2, dtype=torch.float64)
1721-
model = SingleTaskGP(
1722-
train_x, train_y, outcome_transform=Standardize(m=2)
1723-
)
1720+
model = SingleTaskGP(train_x, train_y)
17241721
with catch_warnings():
17251722
simplefilter("ignore", category=NumericsWarning)
17261723
acqf = acqf_class(

test/models/test_model_list_gp_regression.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -562,9 +562,10 @@ def test_fantasize_with_outcome_transform(self) -> None:
562562
Y = torch.cat([Y1, Y2], dim=-1)
563563
target_x = torch.tensor([[0.5]], **tkwargs)
564564

565+
# Default behavior is to Standardize outcomes
565566
model_with_transform = ModelListGP(
566-
SingleTaskGP(X, Y1, outcome_transform=Standardize(m=1)),
567-
SingleTaskGP(X, Y2, outcome_transform=Standardize(m=1)),
567+
SingleTaskGP(X, Y1),
568+
SingleTaskGP(X, Y2),
568569
)
569570
outcome_transform = Standardize(m=2)
570571
y_standardized, _ = outcome_transform(Y)
@@ -682,8 +683,8 @@ def test_fantasize_with_outcome_transform_fixed_noise(self) -> None:
682683
yvar = torch.full_like(Y, 1e-4)
683684
yvar2 = 2 * yvar
684685
model = ModelListGP(
685-
SingleTaskGP(X, Y, yvar, outcome_transform=Standardize(m=1)),
686-
SingleTaskGP(X, Y2, yvar2, outcome_transform=Standardize(m=1)),
686+
SingleTaskGP(X, Y, yvar),
687+
SingleTaskGP(X, Y2, yvar2),
687688
)
688689
# test exceptions
689690
eval_mask = torch.zeros(

test/models/test_relevance_pursuit.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,6 @@
3434
RobustRelevancePursuitSingleTaskGP,
3535
)
3636
from botorch.models.transforms.input import Normalize
37-
from botorch.models.transforms.outcome import Standardize
3837
from botorch.test_functions.base import constant_outlier_generator, CorruptedTestProblem
3938

4039
from botorch.test_functions.synthetic import Ackley
@@ -122,7 +121,6 @@ def _get_robust_model(
122121
mean_module=ZeroMean(),
123122
covar_module=kernel,
124123
input_transform=Normalize(d=X.shape[-1]),
125-
outcome_transform=Standardize(m=Y.shape[-1]),
126124
likelihood=likelihood,
127125
)
128126
model.to(dtype=X.dtype, device=self.device)

test/optim/closures/test_model_closures.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
import torch
1111
from botorch.models import ModelListGP, SingleTaskGP
1212
from botorch.models.transforms.input import Normalize
13-
from botorch.models.transforms.outcome import Standardize
1413
from botorch.optim.closures.model_closures import (
1514
get_loss_closure,
1615
get_loss_closure_with_grads,
@@ -63,7 +62,6 @@ def _get_mlls(
6362
train_X=train_X,
6463
train_Y=train_Y,
6564
input_transform=Normalize(d=1),
66-
outcome_transform=Standardize(m=1),
6765
)
6866
if wrap_likelihood:
6967
model.likelihood = WrapperLikelihood(model.likelihood)

test/optim/test_fit.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
from botorch.exceptions.warnings import OptimizationWarning
1414
from botorch.models import SingleTaskGP
1515
from botorch.models.transforms.input import Normalize
16-
from botorch.models.transforms.outcome import Standardize
1716
from botorch.optim import core, fit
1817
from botorch.optim.core import OptimizationResult, OptimizationStatus
1918
from botorch.utils.context_managers import module_rollback_ctx, TensorCheckpoint
@@ -41,7 +40,6 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
4140
train_X=train_X,
4241
train_Y=train_Y,
4342
input_transform=Normalize(d=1),
44-
outcome_transform=Standardize(m=1),
4543
)
4644
self.mlls[SingleTaskGP, 1] = ExactMarginalLogLikelihood(model.likelihood, model)
4745

@@ -188,7 +186,6 @@ def setUp(self) -> None:
188186
train_X=train_X,
189187
train_Y=train_Y,
190188
input_transform=Normalize(d=1),
191-
outcome_transform=Standardize(m=1),
192189
)
193190
self.mlls[SingleTaskGP, 1] = ExactMarginalLogLikelihood(model.likelihood, model)
194191

test/sampling/pathwise/test_utils.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,6 @@ def setUp(self):
6464
train_X=train_X,
6565
train_Y=train_Y[:, :num_outputs],
6666
input_transform=Normalize(d=2),
67-
outcome_transform=Standardize(m=num_outputs),
6867
)
6968
)
7069

test/test_cross_validation.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from botorch.models.gp_regression import SingleTaskGP
1515
from botorch.models.multitask import MultiTaskGP
1616
from botorch.models.transforms.input import Normalize
17-
from botorch.models.transforms.outcome import Standardize
1817
from botorch.utils.testing import BotorchTestCase, get_random_data
1918
from gpytorch.mlls.exact_marginal_log_likelihood import ExactMarginalLogLikelihood
2019

@@ -73,9 +72,6 @@ def test_single_task_batch_cv(self) -> None:
7372
self.assertIs(cv_folds.train_X.dtype, dtype)
7473

7574
input_transform = Normalize(d=train_X.shape[-1])
76-
outcome_transform = Standardize(
77-
m=m, batch_shape=torch.Size([*batch_shape, n])
78-
)
7975

8076
with warnings.catch_warnings():
8177
warnings.filterwarnings("ignore", category=OptimizationWarning)
@@ -86,7 +82,6 @@ def test_single_task_batch_cv(self) -> None:
8682
fit_args={"optimizer_kwargs": {"options": {"maxiter": 1}}},
8783
model_init_kwargs={
8884
"input_transform": input_transform,
89-
"outcome_transform": outcome_transform,
9085
},
9186
)
9287
with self.subTest(

test/test_fit.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,6 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
9090
train_X=train_X,
9191
train_Y=train_Y,
9292
input_transform=Normalize(d=1),
93-
outcome_transform=Standardize(m=1),
9493
)
9594
self.mll = ExactMarginalLogLikelihood(model.likelihood, model)
9695

@@ -137,7 +136,6 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
137136
train_Y=train_Y,
138137
train_Yvar=torch.full_like(train_Y, 0.1) if fixed_noise else None,
139138
input_transform=Normalize(d=1),
140-
outcome_transform=Standardize(m=output_dim),
141139
)
142140
self.assertIsInstance(model.covar_module, RBFKernel)
143141

0 commit comments

Comments (0)