2424from botorch .acquisition .acquisition import AcquisitionFunction
2525from botorch .acquisition .objective import PosteriorTransform
2626from botorch .exceptions import UnsupportedError
27+ from botorch .exceptions .warnings import legacy_ei_numerics_warning
2728from botorch .models .gp_regression import SingleTaskGP
2829from botorch .models .gpytorch import GPyTorchModel
2930from botorch .models .model import Model
@@ -311,9 +312,9 @@ class ExpectedImprovement(AnalyticAcquisitionFunction):
311312 >>> EI = ExpectedImprovement(model, best_f=0.2)
312313 >>> ei = EI(test_X)
313314
314- NOTE: It is * strongly* recommended to use LogExpectedImprovement instead of regular
315- EI, because it solves the vanishing gradient problem by taking special care of
316- numerical computations and can lead to substantially improved BO performance .
315+ NOTE: It is strongly recommended to use LogExpectedImprovement instead of regular
316+ EI, as it can lead to substantially improved BO performance through improved
317+ numerics. See https://arxiv.org/abs/2310.20708 for details.
317318 """
318319
319320 def __init__ (
@@ -334,6 +335,7 @@ def __init__(
334335 single-output posterior is required.
335336 maximize: If True, consider the problem a maximization problem.
336337 """
338+ legacy_ei_numerics_warning (legacy_name = type (self ).__name__ )
337339 super ().__init__ (model = model , posterior_transform = posterior_transform )
338340 self .register_buffer ("best_f" , torch .as_tensor (best_f ))
339341 self .maximize = maximize
@@ -358,7 +360,7 @@ def forward(self, X: Tensor) -> Tensor:
358360
359361
360362class LogExpectedImprovement (AnalyticAcquisitionFunction ):
361- r"""Logarithm of single -outcome Expected Improvement (analytic).
363+ r"""Single -outcome Log Expected Improvement (analytic).
362364
363365 Computes the logarithm of the classic Expected Improvement acquisition function, in
364366 a numerically robust manner. In particular, the implementation takes special care
@@ -520,6 +522,10 @@ class ConstrainedExpectedImprovement(AnalyticAcquisitionFunction):
520522 >>> constraints = {0: (0.0, None)}
521523 >>> cEI = ConstrainedExpectedImprovement(model, 0.2, 1, constraints)
522524 >>> cei = cEI(test_X)
525+
526+ NOTE: It is strongly recommended to use LogConstrainedExpectedImprovement instead
527+ of regular CEI, as it can lead to substantially improved BO performance through
528+ improved numerics. See https://arxiv.org/abs/2310.20708 for details.
523529 """
524530
525531 def __init__ (
@@ -542,6 +548,7 @@ def __init__(
542548 bounds on that output (resp. interpreted as -Inf / Inf if None)
543549 maximize: If True, consider the problem a maximization problem.
544550 """
551+ legacy_ei_numerics_warning (legacy_name = type (self ).__name__ )
545552 # Use AcquisitionFunction constructor to avoid check for posterior transform.
546553 super (AnalyticAcquisitionFunction , self ).__init__ (model = model )
547554 self .posterior_transform = None
@@ -676,6 +683,10 @@ class NoisyExpectedImprovement(ExpectedImprovement):
676683 >>> model = SingleTaskGP(train_X, train_Y, train_Yvar=train_Yvar)
677684 >>> NEI = NoisyExpectedImprovement(model, train_X)
678685 >>> nei = NEI(test_X)
686+
687+ NOTE: It is strongly recommended to use LogNoisyExpectedImprovement instead
688+ of regular NEI, as it can lead to substantially improved BO performance through
689+ improved numerics. See https://arxiv.org/abs/2310.20708 for details.
679690 """
680691
681692 def __init__ (
@@ -696,6 +707,7 @@ def __init__(
696707 complexity and performance).
697708 maximize: If True, consider the problem a maximization problem.
698709 """
710+ legacy_ei_numerics_warning (legacy_name = type (self ).__name__ )
699711 # sample fantasies
700712 from botorch .sampling .normal import SobolQMCNormalSampler
701713
0 commit comments