Callback #73
```diff
@@ -168,7 +168,7 @@ def models(self):

         :return: list of GPflow models
         """
-        return self._models
+        return self._models.sorted_params

     @property
     def data(self):
```
|
|
```diff
@@ -296,7 +296,7 @@ def _optimize_models(self):

     @Acquisition.models.getter
     def models(self):
-        return ParamList([model for acq in self.operands for model in acq.models.sorted_params])
+        return [model for acq in self.operands for model in acq.models]

     def enable_scaling(self, domain):
         for oper in self.operands:
```
|
```diff
@@ -375,27 +375,31 @@ class MCMCAcquistion(AcquisitionSum):
     """
     Apply MCMC over the hyperparameters of an acquisition function (= over the hyperparameters of the contained models).

-    The models passed into an object of this class are optimized with MLE, and then further sampled with HMC.
-    These hyperparameter samples are then set in copies of the acquisition.
+    The models passed into an object of this class are optimized with MLE (fast burn-in), and then further sampled with
+    HMC. These hyperparameter samples are then set in copies of the acquisition.

     For evaluating the underlying acquisition function, the predictions of the acquisition copies are averaged.
     """
     def __init__(self, acquisition, n_slices, **kwargs):
         assert isinstance(acquisition, Acquisition)
         assert n_slices > 0

-        copies = [copy.deepcopy(acquisition) for _ in range(n_slices - 1)]
-        for c in copies:
-            c.optimize_restarts = 0
-
-        # the call to the constructor of the parent classes, will optimize acquisition, so it obtains the MLE solution.
-        super(MCMCAcquistion, self).__init__([acquisition] + copies)
+        super(MCMCAcquistion, self).__init__([acquisition]*n_slices)
+        self._needs_new_copies = True
         self._sample_opt = kwargs

     def _optimize_models(self):
+        # Optimize model #1
+        self.operands[0]._optimize_models()
+
+        # Copy it again if needed due to changed free state
+        if self._needs_new_copies:
+            new_copies = [copy.deepcopy(self.operands[0]) for _ in range(len(self.operands) - 1)]
```
|
**Contributor:** would `copy.deepcopy([self.operands[0]]*len(self.operands))` work too? (not tested)

**Member (author):** no, the `*` syntax creates shallow copies, so the deepcopy will copy the single object they are all pointing to.
```diff
+            for c in new_copies:
+                c.optimize_restarts = 0
+            self.operands = ParamList([self.operands[0]] + new_copies)
+            self._needs_new_copies = False

         # Draw samples using HMC
         # Sample each model of the acquisition function - results in a list of 2D ndarrays.
         hypers = np.hstack([model.sample(len(self.operands), **self._sample_opt) for model in self.models])
```
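For intuition: the hyperparameter-marginalized acquisition is approximated by averaging the copies' predictions, matching the `1. / len(self.operands)` factor in `build_acquisition` below. A minimal numpy sketch of that averaging step (the per-draw `score` function is hypothetical, for illustration only):

```python
import numpy as np

def averaged_acquisition(score, hyper_draws, Xcand):
    # score(theta, Xcand): acquisition values for a single hyperparameter draw
    # (hypothetical signature). Averaging over the draws approximates
    # marginalizing the hyperparameters.
    return np.mean([score(theta, Xcand) for theta in hyper_draws], axis=0)
```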
|
|
```diff
@@ -419,3 +423,11 @@ def set_data(self, X, Y):
     def build_acquisition(self, Xcand):
         # Average the predictions of the copies.
         return 1. / len(self.operands) * super(MCMCAcquistion, self).build_acquisition(Xcand)
+
+    def _kill_autoflow(self):
+        """
+        Following the recompilation of models, the free state might have changed. This means updating the samples can
```
|
**Contributor:** `""" Following the ...`
```diff
+        cause inconsistencies and errors. Flag for recreation on next optimize.
+        """
+        super(MCMCAcquistion, self)._kill_autoflow()
+        self._needs_new_copies = True
```
|
**Contributor:** I assume we can't use needs_setup for this?

**Member (author):** _needs_setup is triggered by a simple ...
```diff
@@ -18,9 +18,9 @@
 from scipy.optimize import OptimizeResult

 from .acquisition import Acquisition, MCMCAcquistion
-from .optim import Optimizer, SciPyOptimizer
-from .objective import ObjectiveWrapper
 from .design import Design, EmptyDesign
+from .objective import ObjectiveWrapper
+from .optim import Optimizer, SciPyOptimizer
 from .pareto import non_dominated_sort
```
```diff
@@ -32,7 +32,8 @@ class BayesianOptimizer(Optimizer):
     Additionally, it is configured with a separate optimizer for the acquisition function.
     """

-    def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=True, hyper_draws=None):
+    def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=True, hyper_draws=None,
+                 callback=None):
         """
         :param Domain domain: The optimization space.
         :param Acquisition acquisition: The acquisition function to optimize over the domain.
```
|
|
```diff
@@ -51,6 +52,12 @@ def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=Tr
             are obtained using Hamiltonian MC.
             (see `GPflow documentation <https://gpflow.readthedocs.io/en/latest//>`_ for details) for each model.
             The acquisition score is computed for each draw, and averaged.
+        :param callable callback: (optional) this function or object will be called after each evaluation of the
+            objective, once the data of all models has been updated. It receives the models as retrieved by
+            acquisition.models as argument, without the wrapping model handling any scaling. This allows custom
+            model optimization strategies to be implemented.
```
**Contributor:** if we do a separate callbacks.py file, some of the explanation can be moved there + a module link

**Member (author):** see above
```diff
+            All manipulations of GPflow models are permitted. Combined with the optimize_restarts parameter of
+            :class:`~.Acquisition` this allows several scenarios: do the optimization manually from the callback
+            (optimize_restarts equals zero), or choose the starting point + some random restarts (optimize_restarts > 0).
         """
         assert isinstance(acquisition, Acquisition)
         assert hyper_draws is None or hyper_draws > 0
```
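As a usage sketch for the new parameter (the callback body is illustrative, not part of this PR; it assumes GPflow models exposing `optimize()`):

```python
def my_callback(models):
    # Receives the unwrapped GPflow models (no scaling wrapper) after their
    # data has been updated; any GPflow manipulation is permitted here.
    for m in models:
        m.optimize()

# Pairing this with acquisition.optimize_restarts = 0 hands the model
# optimization over to the callback entirely.
opt = BayesianOptimizer(domain, acquisition, callback=my_callback)
```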
|
|
```diff
@@ -69,6 +76,8 @@ def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=Tr
         initial = initial or EmptyDesign(domain)
         self.set_initial(initial.generate())

+        self._iter_callback = callback
+
     @Optimizer.domain.setter
     def domain(self, dom):
         assert (self.domain.size == dom.size)
```
|
|
```diff
@@ -86,6 +95,8 @@ def _update_model_data(self, newX, newY):
         assert self.acquisition.data[0].shape[1] == newX.shape[-1]
         assert self.acquisition.data[1].shape[1] == newY.shape[-1]
         assert newX.shape[0] == newY.shape[0]
+        if newX.size == 0:
```
|
**Contributor:** will this ever happen? As far as I know we can't empty GPflow models, so data[0] will never be empty.

**Member (author):** this line avoids `_needs_setup = True` in case e.g. the EmptyDesign is configured as the initial design (as it is by default)

**Contributor:** As a sidenote: as GPflow doesn't support models with no data, I actually see no use case for BOptimizer having an initial design parameter.
```diff
+            return
         X = np.vstack((self.acquisition.data[0], newX))
         Y = np.vstack((self.acquisition.data[1], newY))
         self.acquisition.set_data(X, Y)
```
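To see why the guard matters with the default `EmptyDesign`: generating an empty design yields a zero-row array, and stacking it onto the data would change nothing while still flagging a model rebuild. A minimal numpy illustration (shapes assumed for the example):

```python
import numpy as np

newX = np.empty((0, 2))                  # what an empty initial design yields (assumed shape)
assert newX.size == 0                    # -> _update_model_data returns early
X = np.vstack((np.ones((5, 2)), newX))  # stacking it would be a no-op anyway
assert X.shape == (5, 2)
```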
|
|
```diff
@@ -175,8 +186,7 @@ def _optimize(self, fx, n_iter):
         :return: OptimizeResult object
         """

-        assert(isinstance(fx, ObjectiveWrapper))
-
+        assert (isinstance(fx, ObjectiveWrapper))
         # Evaluate and add the initial design (if any)
         initial = self.get_initial()
         values = fx(initial)
```
|
|
```diff
@@ -190,6 +200,10 @@ def inverse_acquisition(x):

         # Optimization loop
         for i in range(n_iter):
+            # If a callback is specified and the acquisition has the setup flag enabled (indicating an upcoming
+            # compilation), run the callback.
+            if self._iter_callback and self.acquisition._needs_setup:
+                self._iter_callback([m.wrapped for m in self.acquisition.models])
             result = self.optimizer.optimize(inverse_acquisition)
             self._update_model_data(result.x, fx(result.x))
```
|
|
```diff
@@ -106,3 +106,4 @@ def test_predict_scaling(self):
         fs = n.predict_density(Xt, Yt)
         np.testing.assert_allclose(fr, fs, rtol=1e-2)
+
```
**Contributor:** Does this make deep copies? I assumed you used the old way to assure that they were deep copies.

**Contributor:** Ah I see, `_needs_new_copies = True` makes sure deep copies are made later.

**Member (author):** This version does shallow copies; it's mostly to assure the copy later on is aware of the amount of copies required, without serious overhead.