Skip to content

Commit 1b11aca

Browse files
esantorella and facebook-github-bot
authored and committed
Reap deprecated **kwargs argument from optimize_acqf variants (#2390)
Summary: ## Motivation — This code was deprecated somewhere between 0.8.0 and 0.9.0; as we are now past 0.11.0, it can be reaped. Pull Request resolved: #2390. Test Plan: Existing unit tests, including tutorials. ## Related PRs: #1677. Reviewed By: Balandat. Differential Revision: D58930631. Pulled By: esantorella. fbshipit-source-id: fc6c3900baa53fa8d04299a1e398f23aa1f3aa53
1 parent ef73ea6 commit 1b11aca

File tree

2 files changed

+2
-43
lines changed

2 files changed

+2
-43
lines changed

botorch/optim/optimize.py

Lines changed: 0 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -137,25 +137,6 @@ def get_ic_generator(self) -> TGenInitialConditions:
137137
return gen_batch_initial_conditions
138138

139139

140-
def _raise_deprecation_warning_if_kwargs(fn_name: str, kwargs: Dict[str, Any]) -> None:
141-
"""
142-
Raise a warning if kwargs are provided.
143-
144-
Some functions used to support **kwargs. The applicable parameters have now been
145-
refactored to be named arguments, so no warning will be raised for users passing
146-
the expected arguments. However, if a user had been passing an inapplicable
147-
keyword argument, this will now raise a warning whereas in the past it did
148-
nothing.
149-
"""
150-
if len(kwargs) > 0:
151-
warnings.warn(
152-
f"`{fn_name}` does not support arguments {list(kwargs.keys())}. In "
153-
"the future, this will become an error.",
154-
DeprecationWarning,
155-
stacklevel=2,
156-
)
157-
158-
159140
def _optimize_acqf_all_features_fixed(
160141
*,
161142
bounds: Tensor,
@@ -866,7 +847,6 @@ def optimize_acqf_mixed(
866847
batch_initial_conditions: Optional[Tensor] = None,
867848
ic_generator: Optional[TGenInitialConditions] = None,
868849
ic_gen_kwargs: Optional[Dict] = None,
869-
**kwargs: Any,
870850
) -> Tuple[Tensor, Tensor]:
871851
r"""Optimize over a list of fixed_features and returns the best solution.
872852
@@ -920,8 +900,6 @@ def optimize_acqf_mixed(
920900
for nonlinear inequality constraints.
921901
ic_gen_kwargs: Additional keyword arguments passed to function specified by
922902
`ic_generator`
923-
kwargs: kwargs do nothing. This is provided so that the same arguments can
924-
be passed to different acquisition functions without raising an error.
925903
926904
Returns:
927905
A two-element tuple containing
@@ -939,7 +917,6 @@ def optimize_acqf_mixed(
939917
"are currently not supported when `q > 1`. This is needed to "
940918
"compute the joint acquisition value."
941919
)
942-
_raise_deprecation_warning_if_kwargs("optimize_acqf_mixed", kwargs)
943920

944921
ic_gen_kwargs = ic_gen_kwargs or {}
945922

@@ -1016,7 +993,6 @@ def optimize_acqf_discrete(
1016993
choices: Tensor,
1017994
max_batch_size: int = 2048,
1018995
unique: bool = True,
1019-
**kwargs: Any,
1020996
) -> Tuple[Tensor, Tensor]:
1021997
r"""Optimize over a discrete set of points using batch evaluation.
1022998
@@ -1034,8 +1010,6 @@ def optimize_acqf_discrete(
10341010
a large training set.
10351011
unique: If True return unique choices, o/w choices may be repeated
10361012
(only relevant if `q > 1`).
1037-
kwargs: kwargs do nothing. This is provided so that the same arguments can
1038-
be passed to different acquisition functions without raising an error.
10391013
10401014
Returns:
10411015
A two-element tuple containing
@@ -1050,7 +1024,6 @@ def optimize_acqf_discrete(
10501024
)
10511025
if choices.numel() == 0:
10521026
raise InputDataError("`choices` must be non-emtpy.")
1053-
_raise_deprecation_warning_if_kwargs("optimize_acqf_discrete", kwargs)
10541027
choices_batched = choices.unsqueeze(-2)
10551028
if q > 1:
10561029
candidate_list, acq_value_list = [], []
@@ -1168,7 +1141,6 @@ def optimize_acqf_discrete_local_search(
11681141
batch_initial_conditions: Optional[Tensor] = None,
11691142
max_batch_size: int = 2048,
11701143
unique: bool = True,
1171-
**kwargs: Any,
11721144
) -> Tuple[Tensor, Tensor]:
11731145
r"""Optimize acquisition function over a lattice.
11741146
@@ -1201,16 +1173,13 @@ def optimize_acqf_discrete_local_search(
12011173
a large training set.
12021174
unique: If True return unique choices, o/w choices may be repeated
12031175
(only relevant if `q > 1`).
1204-
kwargs: kwargs do nothing. This is provided so that the same arguments can
1205-
be passed to different acquisition functions without raising an error.
12061176
12071177
Returns:
12081178
A two-element tuple containing
12091179
12101180
- a `q x d`-dim tensor of generated candidates.
12111181
- an associated acquisition value.
12121182
"""
1213-
_raise_deprecation_warning_if_kwargs("optimize_acqf_discrete_local_search", kwargs)
12141183
candidate_list = []
12151184
base_X_pending = acq_function.X_pending if q > 1 else None
12161185
base_X_avoid = X_avoid

test/optim/test_optimize.py

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -94,8 +94,8 @@ class SinOneOverXAcqusitionFunction(MockAcquisitionFunction):
9494
"""
9595
Acquisition function for sin(1/x).
9696
97-
This is useful for testing because it behaves pathologically only zero, so
98-
optimization is likely to fail when initializing near zero but not
97+
This is useful for testing because it behaves pathologically only near zero,
98+
so optimization is likely to fail when initializing near zero but not
9999
elsewhere.
100100
"""
101101

@@ -1565,16 +1565,6 @@ def test_optimize_acqf_discrete(self):
15651565

15661566
choices = torch.rand(5, 2, **tkwargs)
15671567

1568-
# warning for unsupported keyword arguments
1569-
with self.assertWarnsRegex(
1570-
DeprecationWarning,
1571-
r"`optimize_acqf_discrete` does not support arguments "
1572-
r"\['num_restarts'\]. In the future, this will become an error.",
1573-
):
1574-
optimize_acqf_discrete(
1575-
acq_function=mock_acq_function, q=q, choices=choices, num_restarts=8
1576-
)
1577-
15781568
exp_acq_vals = mock_acq_function(choices)
15791569

15801570
# test unique

0 commit comments

Comments
 (0)