NumericsWarning for Legacy EI (#2429)
Summary:
Pull Request resolved: #2429

This commit adds a `NumericsWarning` and the `legacy_ei_numerics_warning` helper, which raises the following warning message for EI, and an analogous one for the other legacy EI variants:
```
NumericsWarning: ExpectedImprovement has known numerical issues that lead to suboptimal optimization performance.
It is strongly recommended to simply replace

         ExpectedImprovement     -->     LogExpectedImprovement

instead, which fixes the issues and has the same API. See https://arxiv.org/abs/2310.20708 for details.
```
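
For code that intentionally keeps a legacy acquisition function (e.g. to benchmark the numerical issues themselves), the warning can be filtered by its category. A minimal sketch, assuming a fitted `model`; `NumericsWarning` and the `ExpectedImprovement` API come from this commit and the docstring example below:

```python
import warnings

from botorch.acquisition.analytic import ExpectedImprovement
from botorch.exceptions.warnings import NumericsWarning

# Deliberately using legacy EI (e.g. for benchmarking): silence only this category.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=NumericsWarning)
    EI = ExpectedImprovement(model, best_f=0.2)
```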

Reviewed By: esantorella

Differential Revision: D59598723

fbshipit-source-id: dc9be73ffe145d71fe14454e60d10129c3bdae4b
SebastianAment authored and facebook-github-bot committed Jul 11, 2024
1 parent 41466be commit 6892be9
Showing 8 changed files with 1,110 additions and 902 deletions.
20 changes: 16 additions & 4 deletions botorch/acquisition/analytic.py
@@ -24,6 +24,7 @@
from botorch.acquisition.acquisition import AcquisitionFunction
from botorch.acquisition.objective import PosteriorTransform
from botorch.exceptions import UnsupportedError
+from botorch.exceptions.warnings import legacy_ei_numerics_warning
from botorch.models.gp_regression import SingleTaskGP
from botorch.models.gpytorch import GPyTorchModel
from botorch.models.model import Model
@@ -311,9 +312,9 @@ class ExpectedImprovement(AnalyticAcquisitionFunction):
>>> EI = ExpectedImprovement(model, best_f=0.2)
>>> ei = EI(test_X)
-    NOTE: It is *strongly* recommended to use LogExpectedImprovement instead of regular
-    EI, because it solves the vanishing gradient problem by taking special care of
-    numerical computations and can lead to substantially improved BO performance.
+    NOTE: It is strongly recommended to use LogExpectedImprovement instead of regular
+    EI, as it can lead to substantially improved BO performance through improved
+    numerics. See https://arxiv.org/abs/2310.20708 for details.
"""

def __init__(
@@ -334,6 +335,7 @@ def __init__(
single-output posterior is required.
maximize: If True, consider the problem a maximization problem.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
super().__init__(model=model, posterior_transform=posterior_transform)
self.register_buffer("best_f", torch.as_tensor(best_f))
self.maximize = maximize
@@ -358,7 +360,7 @@ def forward(self, X: Tensor) -> Tensor:


class LogExpectedImprovement(AnalyticAcquisitionFunction):
r"""Logarithm of single-outcome Expected Improvement (analytic).
r"""Single-outcome Log Expected Improvement (analytic).
Computes the logarithm of the classic Expected Improvement acquisition function, in
a numerically robust manner. In particular, the implementation takes special care
@@ -520,6 +522,10 @@ class ConstrainedExpectedImprovement(AnalyticAcquisitionFunction):
>>> constraints = {0: (0.0, None)}
>>> cEI = ConstrainedExpectedImprovement(model, 0.2, 1, constraints)
>>> cei = cEI(test_X)
+    NOTE: It is strongly recommended to use LogConstrainedExpectedImprovement instead
+    of regular CEI, as it can lead to substantially improved BO performance through
+    improved numerics. See https://arxiv.org/abs/2310.20708 for details.
"""

def __init__(
@@ -542,6 +548,7 @@ def __init__(
bounds on that output (resp. interpreted as -Inf / Inf if None)
maximize: If True, consider the problem a maximization problem.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
# Use AcquisitionFunction constructor to avoid check for posterior transform.
super(AnalyticAcquisitionFunction, self).__init__(model=model)
self.posterior_transform = None
@@ -676,6 +683,10 @@ class NoisyExpectedImprovement(ExpectedImprovement):
>>> model = SingleTaskGP(train_X, train_Y, train_Yvar=train_Yvar)
>>> NEI = NoisyExpectedImprovement(model, train_X)
>>> nei = NEI(test_X)
+    NOTE: It is strongly recommended to use LogNoisyExpectedImprovement instead
+    of regular NEI, as it can lead to substantially improved BO performance through
+    improved numerics. See https://arxiv.org/abs/2310.20708 for details.
"""

def __init__(
@@ -696,6 +707,7 @@ def __init__(
complexity and performance).
maximize: If True, consider the problem a maximization problem.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
# sample fantasies
from botorch.sampling.normal import SobolQMCNormalSampler

11 changes: 11 additions & 0 deletions botorch/acquisition/monte_carlo.py
@@ -43,6 +43,7 @@
repeat_to_match_aug_dim,
)
from botorch.exceptions.errors import UnsupportedError
+from botorch.exceptions.warnings import legacy_ei_numerics_warning
from botorch.models.model import Model
from botorch.sampling.base import MCSampler
from botorch.utils.objective import compute_smoothed_feasibility_indicator
@@ -348,6 +349,10 @@ class qExpectedImprovement(SampleReducingMCAcquisitionFunction):
>>> sampler = SobolQMCNormalSampler(1024)
>>> qEI = qExpectedImprovement(model, best_f, sampler)
>>> qei = qEI(test_X)
+    NOTE: It is strongly recommended to use qLogExpectedImprovement instead
+    of regular qEI, as it can lead to substantially improved BO performance through
+    improved numerics. See https://arxiv.org/abs/2310.20708 for details.
"""

def __init__(
@@ -387,6 +392,7 @@ def __init__(
approximation to the constraint indicators. For more details on this
parameter, see the docs of `compute_smoothed_feasibility_indicator`.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
super().__init__(
model=model,
sampler=sampler,
@@ -428,6 +434,10 @@ class qNoisyExpectedImprovement(
>>> sampler = SobolQMCNormalSampler(1024)
>>> qNEI = qNoisyExpectedImprovement(model, train_X, sampler)
>>> qnei = qNEI(test_X)
+    NOTE: It is strongly recommended to use qLogNoisyExpectedImprovement instead
+    of regular qNEI, as it can lead to substantially improved BO performance through
+    improved numerics. See https://arxiv.org/abs/2310.20708 for details.
"""

def __init__(
@@ -484,6 +494,7 @@ def __init__(
the incremental qNEI from the new point. This would greatly increase
efficiency for large batches.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
super().__init__(
model=model,
sampler=sampler,
3 changes: 3 additions & 0 deletions botorch/acquisition/multi_objective/monte_carlo.py
@@ -36,6 +36,7 @@
MCMultiOutputObjective,
)
from botorch.exceptions.errors import UnsupportedError
+from botorch.exceptions.warnings import legacy_ei_numerics_warning
from botorch.models.model import Model
from botorch.models.transforms.input import InputPerturbation
from botorch.sampling.base import MCSampler
@@ -199,6 +200,7 @@ def __init__(
fat: A Boolean flag indicating whether to use the heavy-tailed approximation
of the constraint indicator.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
if len(ref_point) != partitioning.num_outcomes:
raise ValueError(
"The length of the reference point must match the number of outcomes. "
@@ -408,6 +410,7 @@ def __init__(
marginalize_dim: A batch dimension that should be marginalized. For example,
this is useful when using a batched fully Bayesian model.
"""
+        legacy_ei_numerics_warning(legacy_name=type(self).__name__)
MultiObjectiveMCAcquisitionFunction.__init__(
self,
model=model,
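The two acquisition functions touched in this file, qEHVI and qNEHVI, also have log counterparts (see the `legacy_to_logei` mapping added to `botorch/exceptions/warnings.py` below). A sketch of the noisy case, assuming a fitted multi-output `model`, a `ref_point` with one entry per objective, and `train_X`/`test_X`:

```python
from botorch.acquisition.multi_objective.logei import (
    qLogNoisyExpectedHypervolumeImprovement,
)

# Drop-in replacement for qNoisyExpectedHypervolumeImprovement; the box
# decomposition is computed internally from X_baseline.
qLogNEHVI = qLogNoisyExpectedHypervolumeImprovement(
    model=model,
    ref_point=ref_point,
    X_baseline=train_X,
)
qlognehvi = qLogNEHVI(test_X)
```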
44 changes: 43 additions & 1 deletion botorch/exceptions/warnings.py
@@ -7,6 +7,7 @@
r"""
Botorch Warnings.
"""
+import warnings


class BotorchWarning(Warning):
@@ -34,7 +35,7 @@ class CostAwareWarning(BotorchWarning):


class OptimizationWarning(BotorchWarning):
r"""Optimization-releated warnings."""
r"""Optimization-related warnings."""

pass

@@ -57,6 +58,47 @@ class UserInputWarning(BotorchWarning):
pass


+class NumericsWarning(BotorchWarning):
+    r"""Warning raised when numerical issues are detected."""
+
+    pass
+
+def legacy_ei_numerics_warning(legacy_name: str) -> None:
+    """Raises a warning for legacy EI acquisition functions that are known to have
+    numerical issues and should be replaced with the LogEI version for virtually all
+    use-cases except for explicit benchmarking of the numerical issues of legacy EI.
+
+    Args:
+        legacy_name: The name of the legacy EI acquisition function.
+    """
+    legacy_to_logei = {
+        "ExpectedImprovement": "LogExpectedImprovement",
+        "ConstrainedExpectedImprovement": "LogConstrainedExpectedImprovement",
+        "NoisyExpectedImprovement": "LogNoisyExpectedImprovement",
+        "qExpectedImprovement": "qLogExpectedImprovement",
+        "qNoisyExpectedImprovement": "qLogNoisyExpectedImprovement",
+        "qExpectedHypervolumeImprovement": "qLogExpectedHypervolumeImprovement",
+        "qNoisyExpectedHypervolumeImprovement": (
+            "qLogNoisyExpectedHypervolumeImprovement"
+        ),
+    }
+    # Only raise the warning if the legacy name is in the mapping. It can fail to
+    # be in the mapping if the legacy acquisition function derives from a legacy
+    # EI class, e.g. MOMF, which derives from qEHVI, but there is no corresponding
+    # LogMOMF yet.
+    if legacy_name in legacy_to_logei:
+        logei_name = legacy_to_logei[legacy_name]
+        msg = (
+            f"{legacy_name} has known numerical issues that lead to suboptimal "
+            "optimization performance. It is strongly recommended to simply replace"
+            f"\n\n\t {legacy_name} \t --> \t {logei_name} \n\n"
+            "instead, which fixes the issues and has the same "
+            "API. See https://arxiv.org/abs/2310.20708 for details."
+        )
+        warnings.warn(msg, NumericsWarning, stacklevel=2)


def _get_single_precision_warning(dtype_str: str) -> str:
msg = (
f"The model inputs are of type {dtype_str}. It is strongly recommended "
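A minimal sketch of exercising the helper directly, using only names added in this file; note that a legacy name without a log counterpart in the mapping (e.g. `MOMF`) emits nothing:

```python
import warnings

from botorch.exceptions.warnings import NumericsWarning, legacy_ei_numerics_warning

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    legacy_ei_numerics_warning(legacy_name="qExpectedImprovement")
assert any(issubclass(w.category, NumericsWarning) for w in caught)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    legacy_ei_numerics_warning(legacy_name="MOMF")  # no LogMOMF yet -> no warning
assert not caught
```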
