6 changes: 3 additions & 3 deletions bofire/benchmarks/hyperopt.py
@@ -19,7 +19,7 @@ def __init__(
show_progress_bar: bool = False,
) -> None:
super().__init__()
if surrogate_data.hyperconfig is None:
if surrogate_data.hyperconfig_access is None:
raise ValueError("No hyperoptimization configuration found.")
self.surrogate_data = surrogate_data
self.training_data = training_data
@@ -30,11 +30,11 @@ def __init__(

@property
def domain(self) -> Domain:
return self.surrogate_data.hyperconfig.domain # type: ignore
return self.surrogate_data.hyperconfig_access.domain # type: ignore

@property
def target_metric(self):
return self.surrogate_data.hyperconfig.target_metric # type: ignore
return self.surrogate_data.hyperconfig_access.target_metric # type: ignore

def _f(self, candidates: pd.DataFrame) -> pd.DataFrame:
for i, candidate in tqdm(
26 changes: 23 additions & 3 deletions bofire/data_models/surrogates/bnn.py
@@ -1,11 +1,31 @@
from typing import Literal, Optional
from typing import Literal, Optional, Type

from pydantic import Field

from bofire.data_models.features.api import AnyOutput
from bofire.data_models.features.continuous import ContinuousOutput
from bofire.data_models.kernels.api import InfiniteWidthBNNKernel
from bofire.data_models.surrogates.single_task_gp import SingleTaskGPSurrogate
from bofire.data_models.priors.api import HVARFNER_NOISE_PRIOR, AnyPrior
from bofire.data_models.surrogates.single_task_gp import TrainableBotorchSurrogate
from bofire.data_models.surrogates.trainable import Hyperconfig


class SingleTaskIBNNSurrogate(SingleTaskGPSurrogate):
class SingleTaskIBNNSurrogate(TrainableBotorchSurrogate):
type: Literal["SingleTaskIBNNSurrogate"] = "SingleTaskIBNNSurrogate"
kernel: InfiniteWidthBNNKernel = InfiniteWidthBNNKernel()
hyperconfig: Optional[Hyperconfig] = None
noise_prior: AnyPrior = Field(default_factory=lambda: HVARFNER_NOISE_PRIOR())

@classmethod
def is_output_implemented(cls, my_type: Type[AnyOutput]) -> bool:
"""Abstract method to check output type for surrogate models
Args:
my_type: continuous or categorical output
Returns:
bool: True if the output type is valid for the surrogate chosen, False otherwise
"""
return isinstance(my_type, type(ContinuousOutput))

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig
9 changes: 8 additions & 1 deletion bofire/data_models/surrogates/fully_bayesian.py
@@ -1,8 +1,9 @@
from typing import Annotated, List, Literal, Type
from typing import Annotated, List, Literal, Optional, Type

from pydantic import AfterValidator, Field, field_validator, model_validator

from bofire.data_models.features.api import AnyOutput, ContinuousOutput
from bofire.data_models.surrogates.trainable import Hyperconfig
from bofire.data_models.surrogates.trainable_botorch import TrainableBotorchSurrogate
from bofire.data_models.types import make_unique_validator

@@ -19,6 +20,12 @@ class FullyBayesianSingleTaskGPSurrogate(TrainableBotorchSurrogate):
List[str], AfterValidator(make_unique_validator("Features"))
] = []

hyperconfig: Optional[Hyperconfig] = None

@property
Contributor: Why are we setting this here and not via inheritance?

Contributor Author: With this, TrainableSurrogate only specifies that any (non-abstract) implementation provides access to the generic hyperconfig. Therefore, TrainableSurrogate does not need to know which specific subtype of hyperconfig a surrogate implementation uses, which gets rid of the generics. hyperconfig_access is used in runners/trainable.py and other places. (A sketch of this pattern follows this file's diff.)

def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

@model_validator(mode="after")
def validate_features_to_warp(self):
input_keys = self.inputs.get_keys()
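The thread above describes the design: the base TrainableSurrogate only promises access to a generic hyperconfig, while each concrete surrogate declares its own hyperconfig field and returns it from hyperconfig_access. Below is a minimal, hypothetical sketch of that pattern; the class bodies are simplified stand-ins (names such as MySurrogate and n_folds are illustrative), not the actual BoFire definitions.

```python
from typing import Optional

from pydantic import BaseModel


class Hyperconfig(BaseModel):
    # simplified: the real Hyperconfig carries strategy settings, target metric, etc.
    target_metric: str = "MAE"


class TrainableSurrogate(BaseModel):
    @property
    def hyperconfig_access(self) -> Optional[Hyperconfig]:
        """Concrete surrogates override this to expose whatever hyperconfig they carry."""
        raise NotImplementedError


class MyHyperconfig(Hyperconfig):
    # a subtype-specific knob, purely illustrative
    n_folds: int = 5


class MySurrogate(TrainableSurrogate):
    # the concrete surrogate owns the (possibly more specific) hyperconfig field
    hyperconfig: Optional[MyHyperconfig] = None

    @property
    def hyperconfig_access(self) -> Optional[Hyperconfig]:
        return self.hyperconfig


# Generic callers (e.g. the hyperopt benchmark) only go through the accessor,
# so they never need to know the concrete hyperconfig subtype.
surrogate = MySurrogate(hyperconfig=MyHyperconfig())
assert surrogate.hyperconfig_access is not None
print(surrogate.hyperconfig_access.target_metric)
```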
9 changes: 8 additions & 1 deletion bofire/data_models/surrogates/linear.py
@@ -1,4 +1,4 @@
from typing import Literal, Type
from typing import Literal, Optional, Type

from pydantic import Field

@@ -7,12 +7,19 @@
from bofire.data_models.kernels.api import LinearKernel
from bofire.data_models.priors.api import THREESIX_NOISE_PRIOR, AnyPrior
from bofire.data_models.surrogates.scaler import ScalerEnum
from bofire.data_models.surrogates.trainable import Hyperconfig
from bofire.data_models.surrogates.trainable_botorch import TrainableBotorchSurrogate


class LinearSurrogate(TrainableBotorchSurrogate):
type: Literal["LinearSurrogate"] = "LinearSurrogate"

hyperconfig: Optional[Hyperconfig] = None

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

kernel: LinearKernel = Field(default_factory=lambda: LinearKernel())
noise_prior: AnyPrior = Field(default_factory=lambda: THREESIX_NOISE_PRIOR())
scaler: ScalerEnum = ScalerEnum.NORMALIZE
11 changes: 9 additions & 2 deletions bofire/data_models/surrogates/map_saas.py
@@ -1,8 +1,9 @@
from typing import Literal, Type
from typing import Literal, Optional, Type

from pydantic import PositiveInt
from pydantic import Field, PositiveInt

from bofire.data_models.features.api import AnyOutput, ContinuousOutput
from bofire.data_models.surrogates.trainable import Hyperconfig
from bofire.data_models.surrogates.trainable_botorch import TrainableBotorchSurrogate


@@ -25,6 +26,12 @@ class AdditiveMapSaasSingleTaskGPSurrogate(TrainableBotorchSurrogate):
)
n_taus: PositiveInt = 4

hyperconfig: Optional[Hyperconfig] = Field(default=None)

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

@classmethod
def is_output_implemented(cls, my_type: Type[AnyOutput]) -> bool:
"""Abstract method to check output type for surrogate models
99 changes: 52 additions & 47 deletions bofire/data_models/surrogates/mixed_single_task_gp.py
@@ -54,53 +54,6 @@ class MixedSingleTaskGPHyperconfig(Hyperconfig):
"FractionalFactorialStrategy", "SoboStrategy", "RandomStrategy"
] = "FractionalFactorialStrategy"

@staticmethod
def _update_hyperparameters(
surrogate_data: "MixedSingleTaskGPSurrogate",
hyperparameters: pd.Series,
):
if hyperparameters.prior == "mbo":
noise_prior, lengthscale_prior, _ = (
MBO_NOISE_PRIOR(),
MBO_LENGTHSCALE_PRIOR(),
MBO_OUTPUTSCALE_PRIOR(),
)
elif hyperparameters.prior == "threesix":
noise_prior, lengthscale_prior, _ = (
THREESIX_NOISE_PRIOR(),
THREESIX_LENGTHSCALE_PRIOR(),
THREESIX_SCALE_PRIOR(),
)
else:
noise_prior, lengthscale_prior = (
HVARFNER_NOISE_PRIOR(),
HVARFNER_LENGTHSCALE_PRIOR(),
)

surrogate_data.noise_prior = noise_prior
if hyperparameters.continuous_kernel == "rbf":
surrogate_data.continuous_kernel = RBFKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)

elif hyperparameters.continuous_kernel == "matern_2.5":
surrogate_data.continuous_kernel = MaternKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
nu=2.5,
)

elif hyperparameters.continuous_kernel == "matern_1.5":
surrogate_data.continuous_kernel = MaternKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
nu=1.5,
)

else:
raise ValueError(f"Kernel {hyperparameters.kernel} not known.")


class MixedSingleTaskGPSurrogate(TrainableBotorchSurrogate):
type: Literal["MixedSingleTaskGPSurrogate"] = "MixedSingleTaskGPSurrogate"
@@ -121,6 +74,10 @@ class MixedSingleTaskGPSurrogate(TrainableBotorchSurrogate):
default_factory=lambda: MixedSingleTaskGPHyperconfig(),
)

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

@classmethod
def _default_categorical_encodings(
cls,
@@ -190,3 +147,51 @@ def is_output_implemented(cls, my_type: Type[AnyOutput]) -> bool:
bool: True if the output type is valid for the surrogate chosen, False otherwise
"""
return isinstance(my_type, type(ContinuousOutput))

def update_hyperparameters(
Contributor: Hmm, why are you moving it outside of the Hyperconfig? Is this necessary?

Contributor Author: I think this way the relationship surrogate <-> hyperconfig becomes clearer. Before, there was an inversion of responsibility where the hyperconfig would modify its surrogate in a manner which I did not anticipate. (A sketch of the new pattern follows this file's diff.)

self,
hyperparameters: pd.Series,
):
super().update_hyperparameters(hyperparameters)

if hyperparameters.prior == "mbo":
noise_prior, lengthscale_prior, _ = (
MBO_NOISE_PRIOR(),
MBO_LENGTHSCALE_PRIOR(),
MBO_OUTPUTSCALE_PRIOR(),
)
elif hyperparameters.prior == "threesix":
noise_prior, lengthscale_prior, _ = (
THREESIX_NOISE_PRIOR(),
THREESIX_LENGTHSCALE_PRIOR(),
THREESIX_SCALE_PRIOR(),
)
else:
noise_prior, lengthscale_prior = (
HVARFNER_NOISE_PRIOR(),
HVARFNER_LENGTHSCALE_PRIOR(),
)

self.noise_prior = noise_prior
if hyperparameters.continuous_kernel == "rbf":
self.continuous_kernel = RBFKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)

elif hyperparameters.continuous_kernel == "matern_2.5":
self.continuous_kernel = MaternKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
nu=2.5,
)

elif hyperparameters.continuous_kernel == "matern_1.5":
self.continuous_kernel = MaternKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
nu=1.5,
)

else:
raise ValueError(f"Kernel {hyperparameters.kernel} not known.")
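To make the responsibility shift from the thread above concrete, here is a small hypothetical sketch (plain classes, illustrative names such as SketchGPSurrogate; not the actual BoFire API). Previously the Hyperconfig mutated the surrogate through a static _update_hyperparameters helper; after this change the surrogate overrides its own update_hyperparameters, calls super(), and configures itself from the hyperparameter row.

```python
import pandas as pd


class TrainableSurrogate:
    """Base hook: concrete surrogates extend update_hyperparameters."""

    def update_hyperparameters(self, hyperparameters: pd.Series) -> None:
        pass


class SketchGPSurrogate(TrainableSurrogate):
    def __init__(self) -> None:
        self.kernel = "rbf"
        self.ard = True

    def update_hyperparameters(self, hyperparameters: pd.Series) -> None:
        # the surrogate now updates itself instead of being mutated by its hyperconfig
        super().update_hyperparameters(hyperparameters)
        if hyperparameters.kernel not in {"rbf", "matern_1.5", "matern_2.5"}:
            raise ValueError(f"Kernel {hyperparameters.kernel} not known.")
        self.kernel = hyperparameters.kernel
        self.ard = bool(hyperparameters.ard)


surrogate = SketchGPSurrogate()
surrogate.update_hyperparameters(pd.Series({"kernel": "matern_2.5", "ard": False}))
print(surrogate.kernel, surrogate.ard)  # matern_2.5 False
```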
9 changes: 8 additions & 1 deletion bofire/data_models/surrogates/mlp.py
@@ -1,5 +1,5 @@
from collections.abc import Sequence
from typing import Annotated, Literal, Type
from typing import Annotated, Literal, Optional, Type

from pydantic import Field

@@ -9,11 +9,18 @@
ContinuousOutput,
)
from bofire.data_models.surrogates.scaler import ScalerEnum
from bofire.data_models.surrogates.trainable import Hyperconfig
from bofire.data_models.surrogates.trainable_botorch import TrainableBotorchSurrogate


class MLPEnsemble(TrainableBotorchSurrogate):
type: Literal["MLPEnsemble"] = "MLPEnsemble"
hyperconfig: Optional[Hyperconfig] = None

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

n_estimators: Annotated[int, Field(ge=1)] = 5
hidden_layer_sizes: Sequence = (100,)
activation: Literal["relu", "logistic", "tanh"] = "relu"
87 changes: 46 additions & 41 deletions bofire/data_models/surrogates/multi_task_gp.py
@@ -44,47 +44,6 @@ class MultiTaskGPHyperconfig(Hyperconfig):
"FractionalFactorialStrategy", "SoboStrategy", "RandomStrategy"
] = "FractionalFactorialStrategy"

@staticmethod
def _update_hyperparameters(
surrogate_data: "MultiTaskGPSurrogate",
hyperparameters: pd.Series,
):
def matern_25(ard: bool, lengthscale_prior: AnyPrior) -> MaternKernel:
return MaternKernel(nu=2.5, lengthscale_prior=lengthscale_prior, ard=ard)

def matern_15(ard: bool, lengthscale_prior: AnyPrior) -> MaternKernel:
return MaternKernel(nu=1.5, lengthscale_prior=lengthscale_prior, ard=ard)

if hyperparameters.prior == "mbo":
noise_prior, lengthscale_prior = (
MBO_NOISE_PRIOR(),
MBO_LENGTHSCALE_PRIOR(),
)
else:
noise_prior, lengthscale_prior = (
THREESIX_NOISE_PRIOR(),
THREESIX_LENGTHSCALE_PRIOR(),
)

surrogate_data.noise_prior = noise_prior
if hyperparameters.kernel == "rbf":
surrogate_data.kernel = RBFKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
elif hyperparameters.kernel == "matern_2.5":
surrogate_data.kernel = matern_25(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
elif hyperparameters.kernel == "matern_1.5":
surrogate_data.kernel = matern_15(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
else:
raise ValueError(f"Kernel {hyperparameters.kernel} not known.")


class MultiTaskGPSurrogate(TrainableBotorchSurrogate):
type: Literal["MultiTaskGPSurrogate"] = "MultiTaskGPSurrogate"
@@ -101,6 +60,10 @@ class MultiTaskGPSurrogate(TrainableBotorchSurrogate):
default_factory=lambda: MultiTaskGPHyperconfig(),
)

@property
def hyperconfig_access(self) -> Optional[Hyperconfig]:
return self.hyperconfig

@classmethod
def _default_categorical_encodings(
cls,
@@ -155,3 +118,45 @@ def validate_encoding(cls, v, info):
)

return v

def update_hyperparameters(
self,
hyperparameters: pd.Series,
):
super().update_hyperparameters(hyperparameters)

def matern_25(ard: bool, lengthscale_prior: AnyPrior) -> MaternKernel:
return MaternKernel(nu=2.5, lengthscale_prior=lengthscale_prior, ard=ard)

def matern_15(ard: bool, lengthscale_prior: AnyPrior) -> MaternKernel:
return MaternKernel(nu=1.5, lengthscale_prior=lengthscale_prior, ard=ard)

if hyperparameters.prior == "mbo":
noise_prior, lengthscale_prior = (
MBO_NOISE_PRIOR(),
MBO_LENGTHSCALE_PRIOR(),
)
else:
noise_prior, lengthscale_prior = (
THREESIX_NOISE_PRIOR(),
THREESIX_LENGTHSCALE_PRIOR(),
)

self.noise_prior = noise_prior
if hyperparameters.kernel == "rbf":
self.kernel = RBFKernel(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
elif hyperparameters.kernel == "matern_2.5":
self.kernel = matern_25(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
elif hyperparameters.kernel == "matern_1.5":
self.kernel = matern_15(
ard=hyperparameters.ard,
lengthscale_prior=lengthscale_prior,
)
else:
raise ValueError(f"Kernel {hyperparameters.kernel} not known.")