Make method for surrogates #671
base: main
Changes from all commits: 1f73e30, 9907d91, cf0cdb7, 3da10b1, c6c9b87, 4ff4eb0
First changed file (defines `SingleTaskIBNNSurrogate`):

```diff
@@ -1,11 +1,31 @@
-from typing import Literal, Optional
+from typing import Literal, Optional, Type
 
+from pydantic import Field
+
 from bofire.data_models.features.api import AnyOutput
 from bofire.data_models.features.continuous import ContinuousOutput
 from bofire.data_models.kernels.api import InfiniteWidthBNNKernel
-from bofire.data_models.surrogates.single_task_gp import SingleTaskGPSurrogate
+from bofire.data_models.priors.api import HVARFNER_NOISE_PRIOR, AnyPrior
+from bofire.data_models.surrogates.single_task_gp import TrainableBotorchSurrogate
+from bofire.data_models.surrogates.trainable import Hyperconfig
 
 
-class SingleTaskIBNNSurrogate(SingleTaskGPSurrogate):
+class SingleTaskIBNNSurrogate(TrainableBotorchSurrogate):
     type: Literal["SingleTaskIBNNSurrogate"] = "SingleTaskIBNNSurrogate"
     kernel: InfiniteWidthBNNKernel = InfiniteWidthBNNKernel()
+    hyperconfig: Optional[Hyperconfig] = None
+    noise_prior: AnyPrior = Field(default_factory=lambda: HVARFNER_NOISE_PRIOR())
+
+    @classmethod
+    def is_output_implemented(cls, my_type: Type[AnyOutput]) -> bool:
+        """Abstract method to check output type for surrogate models
+        Args:
+            my_type: continuous or categorical output
+        Returns:
+            bool: True if the output type is valid for the surrogate chosen, False otherwise
+        """
+        return isinstance(my_type, type(ContinuousOutput))
+
+    @property
+    def hyperconfig_access(self) -> Optional[Hyperconfig]:
+        return self.hyperconfig
```
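For orientation, here is a hedged usage sketch of the refactored IBNN data model. The import paths and constructor arguments (`Inputs`/`Outputs` containers, `bounds`, `key`) are assumptions about the usual BoFire API and are not part of this diff:

```python
# Sketch only: import paths and constructor arguments are assumed, not taken from this PR.
from bofire.data_models.domain.api import Inputs, Outputs
from bofire.data_models.features.api import ContinuousInput, ContinuousOutput
from bofire.data_models.surrogates.api import SingleTaskIBNNSurrogate

surrogate_data = SingleTaskIBNNSurrogate(
    inputs=Inputs(features=[ContinuousInput(key="x1", bounds=(0, 1))]),
    outputs=Outputs(features=[ContinuousOutput(key="y")]),
)

# The IBNN surrogate now derives directly from TrainableBotorchSurrogate, so it
# declares its own (optional) hyperconfig and noise prior instead of inheriting
# them from SingleTaskGPSurrogate.
print(surrogate_data.hyperconfig)  # None by default
print(surrogate_data.noise_prior)  # HVARFNER_NOISE_PRIOR()

# Only continuous outputs are supported.
assert SingleTaskIBNNSurrogate.is_output_implemented(ContinuousOutput)
```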
Second changed file (defines `MixedSingleTaskGPHyperconfig` and `MixedSingleTaskGPSurrogate`):

```diff
@@ -54,53 +54,6 @@ class MixedSingleTaskGPHyperconfig(Hyperconfig):
         "FractionalFactorialStrategy", "SoboStrategy", "RandomStrategy"
     ] = "FractionalFactorialStrategy"
 
-    @staticmethod
-    def _update_hyperparameters(
-        surrogate_data: "MixedSingleTaskGPSurrogate",
-        hyperparameters: pd.Series,
-    ):
-        if hyperparameters.prior == "mbo":
-            noise_prior, lengthscale_prior, _ = (
-                MBO_NOISE_PRIOR(),
-                MBO_LENGTHSCALE_PRIOR(),
-                MBO_OUTPUTSCALE_PRIOR(),
-            )
-        elif hyperparameters.prior == "threesix":
-            noise_prior, lengthscale_prior, _ = (
-                THREESIX_NOISE_PRIOR(),
-                THREESIX_LENGTHSCALE_PRIOR(),
-                THREESIX_SCALE_PRIOR(),
-            )
-        else:
-            noise_prior, lengthscale_prior = (
-                HVARFNER_NOISE_PRIOR(),
-                HVARFNER_LENGTHSCALE_PRIOR(),
-            )
-
-        surrogate_data.noise_prior = noise_prior
-        if hyperparameters.continuous_kernel == "rbf":
-            surrogate_data.continuous_kernel = RBFKernel(
-                ard=hyperparameters.ard,
-                lengthscale_prior=lengthscale_prior,
-            )
-
-        elif hyperparameters.continuous_kernel == "matern_2.5":
-            surrogate_data.continuous_kernel = MaternKernel(
-                ard=hyperparameters.ard,
-                lengthscale_prior=lengthscale_prior,
-                nu=2.5,
-            )
-
-        elif hyperparameters.continuous_kernel == "matern_1.5":
-            surrogate_data.continuous_kernel = MaternKernel(
-                ard=hyperparameters.ard,
-                lengthscale_prior=lengthscale_prior,
-                nu=1.5,
-            )
-
-        else:
-            raise ValueError(f"Kernel {hyperparameters.kernel} not known.")
-
 
 class MixedSingleTaskGPSurrogate(TrainableBotorchSurrogate):
     type: Literal["MixedSingleTaskGPSurrogate"] = "MixedSingleTaskGPSurrogate"
@@ -121,6 +74,10 @@ class MixedSingleTaskGPSurrogate(TrainableBotorchSurrogate):
         default_factory=lambda: MixedSingleTaskGPHyperconfig(),
     )
 
+    @property
+    def hyperconfig_access(self) -> Optional[Hyperconfig]:
+        return self.hyperconfig
+
     @classmethod
     def _default_categorical_encodings(
         cls,
```
```diff
@@ -190,3 +147,51 @@ def is_output_implemented(cls, my_type: Type[AnyOutput]) -> bool:
             bool: True if the output type is valid for the surrogate chosen, False otherwise
         """
         return isinstance(my_type, type(ContinuousOutput))
+
+    def update_hyperparameters(
```
Contributor: Hmm, why are you moving it outside of the Hyperconfig? Is this necessary?

Contributor (Author): I think this way the relationship surrogate <-> hyperconfig becomes clearer. Before, there was an inversion of responsibility where the hyperconfig would modify its surrogate in a manner which I did not anticipate.
```diff
+        self,
+        hyperparameters: pd.Series,
+    ):
+        super().update_hyperparameters(hyperparameters)
+
+        if hyperparameters.prior == "mbo":
+            noise_prior, lengthscale_prior, _ = (
+                MBO_NOISE_PRIOR(),
+                MBO_LENGTHSCALE_PRIOR(),
+                MBO_OUTPUTSCALE_PRIOR(),
+            )
+        elif hyperparameters.prior == "threesix":
+            noise_prior, lengthscale_prior, _ = (
+                THREESIX_NOISE_PRIOR(),
+                THREESIX_LENGTHSCALE_PRIOR(),
+                THREESIX_SCALE_PRIOR(),
+            )
+        else:
+            noise_prior, lengthscale_prior = (
+                HVARFNER_NOISE_PRIOR(),
+                HVARFNER_LENGTHSCALE_PRIOR(),
+            )
+
+        self.noise_prior = noise_prior
+        if hyperparameters.continuous_kernel == "rbf":
+            self.continuous_kernel = RBFKernel(
+                ard=hyperparameters.ard,
+                lengthscale_prior=lengthscale_prior,
+            )
+
+        elif hyperparameters.continuous_kernel == "matern_2.5":
+            self.continuous_kernel = MaternKernel(
+                ard=hyperparameters.ard,
+                lengthscale_prior=lengthscale_prior,
+                nu=2.5,
+            )
+
+        elif hyperparameters.continuous_kernel == "matern_1.5":
+            self.continuous_kernel = MaternKernel(
+                ard=hyperparameters.ard,
+                lengthscale_prior=lengthscale_prior,
+                nu=1.5,
+            )
+
+        else:
+            raise ValueError(f"Kernel {hyperparameters.kernel} not known.")
```
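To make the effect of the move concrete, a minimal before/after sketch of a call site. This is hedged: the real call sites (e.g. in the trainable runner) are not shown in this diff, the constructor arguments are assumed, and it is assumed that the base-class `update_hyperparameters` accepts the same hyperparameter series:

```python
# Sketch only: constructor API and hyperparameter keys assumed from the diff context.
import pandas as pd

from bofire.data_models.domain.api import Inputs, Outputs
from bofire.data_models.features.api import (
    CategoricalInput,
    ContinuousInput,
    ContinuousOutput,
)
from bofire.data_models.surrogates.api import MixedSingleTaskGPSurrogate

surrogate_data = MixedSingleTaskGPSurrogate(
    inputs=Inputs(
        features=[
            ContinuousInput(key="x1", bounds=(0, 1)),
            CategoricalInput(key="c1", categories=["a", "b"]),
        ]
    ),
    outputs=Outputs(features=[ContinuousOutput(key="y")]),
)

# One candidate from the search space described by the hyperconfig.
hyperparameters = pd.Series({"prior": "mbo", "continuous_kernel": "rbf", "ard": True})

# Before this PR, the hyperconfig mutated the surrogate from the outside:
#   MixedSingleTaskGPHyperconfig._update_hyperparameters(surrogate_data, hyperparameters)
# After this PR, the surrogate updates itself:
surrogate_data.update_hyperparameters(hyperparameters)

# The selected kernel and priors are now set on the surrogate itself.
print(type(surrogate_data.continuous_kernel).__name__)  # RBFKernel
```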
Contributor: Why are we not doing this via inheritance?
Contributor (Author): With this, TrainableSurrogate only specifies that any (non-abstract) implementation provides access to the generic hyperconfig. Therefore, TrainableSurrogate does not need to know which specific Hyperconfig subtype a surrogate implementation uses, which gets rid of the generics. hyperconfig_access is used in runners/trainable.py and other places.
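A minimal sketch of the kind of generic consumer this enables. It is illustrative only: the actual code in runners/trainable.py is not part of this diff, and the helper name below is hypothetical; it only assumes that `TrainableSurrogate` lives in `bofire.data_models.surrogates.trainable`, the module the diff already imports `Hyperconfig` from:

```python
# Illustrative helper, not from the PR.
from bofire.data_models.surrogates.trainable import TrainableSurrogate


def needs_hyperoptimization(surrogate_data: TrainableSurrogate) -> bool:
    # Generic code relies only on the hyperconfig_access property promised by the
    # base class; it never needs to know the concrete Hyperconfig subtype.
    return surrogate_data.hyperconfig_access is not None
```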
hyperconfig_access is used in runners/trainable.py and other places.