
Commit 264b79e

Remove sorting, gradient optimize only continuous params
1 parent 3c4c298 commit 264b79e

File tree

10 files changed: +216 -139 lines

bayes_opt/acquisition.py

Lines changed: 45 additions & 17 deletions
@@ -215,15 +215,22 @@ def _acq_min(
         if n_random == 0 and n_l_bfgs_b == 0:
             error_msg = "Either n_random or n_l_bfgs_b needs to be greater than 0."
             raise ValueError(error_msg)
-        x_min_r, min_acq_r = self._random_sample_minimize(acq, space, n_random=n_random)
-        x_min_l, min_acq_l = self._l_bfgs_b_minimize(acq, space, n_x_seeds=n_l_bfgs_b)
-        # Either n_random or n_l_bfgs_b is not 0 => at least one of x_min_r and x_min_l is not None
-        if min_acq_r < min_acq_l:
-            return x_min_r
-        return x_min_l
+        x_min_r, min_acq_r, x_seeds = self._random_sample_minimize(
+            acq, space, n_random=max(n_random, n_l_bfgs_b), n_x_seeds=n_l_bfgs_b
+        )
+        if n_l_bfgs_b:
+            x_min_l, min_acq_l = self._l_bfgs_b_minimize(acq, space, x_seeds=x_seeds)
+            # Either n_random or n_l_bfgs_b is not 0 => at least one of x_min_r and x_min_l is not None
+            if min_acq_r > min_acq_l:
+                return x_min_l
+        return x_min_r
 
     def _random_sample_minimize(
-        self, acq: Callable[[NDArray[Float]], NDArray[Float]], space: TargetSpace, n_random: int
+        self,
+        acq: Callable[[NDArray[Float]], NDArray[Float]],
+        space: TargetSpace,
+        n_random: int,
+        n_x_seeds: int = 0,
     ) -> tuple[NDArray[Float] | None, float]:
         """Random search to find the minimum of `acq` function.
 
@@ -238,6 +245,8 @@ def _random_sample_minimize(
         n_random : int
             Number of random samples to use.
 
+        n_x_seeds : int
+            Number of top points to return, for use as starting points for L-BFGS-B.
         Returns
         -------
         x_min : np.ndarray
@@ -252,10 +261,18 @@ def _random_sample_minimize(
         ys = acq(x_tries)
         x_min = x_tries[ys.argmin()]
         min_acq = ys.min()
-        return x_min, min_acq
+        if n_x_seeds != 0:
+            idxs = np.argsort(ys)[-n_x_seeds:]
+            x_seeds = x_tries[idxs]
+        else:
+            x_seeds = []
+        return x_min, min_acq, x_seeds
 
     def _l_bfgs_b_minimize(
-        self, acq: Callable[[NDArray[Float]], NDArray[Float]], space: TargetSpace, n_x_seeds: int = 10
+        self,
+        acq: Callable[[NDArray[Float]], NDArray[Float]],
+        space: TargetSpace,
+        x_seeds: NDArray[Float] | None = None,
     ) -> tuple[NDArray[Float] | None, float]:
         """Random search to find the minimum of `acq` function.
 
@@ -267,8 +284,8 @@ def _l_bfgs_b_minimize(
         space : TargetSpace
             The target space over which to optimize.
 
-        n_x_seeds : int
-            Number of starting points for the L-BFGS-B optimizer.
+        x_seeds : int
+            Starting points for the L-BFGS-B optimizer.
 
         Returns
         -------
@@ -278,24 +295,35 @@ def _l_bfgs_b_minimize(
         min_acq : float
             Acquisition function value at `x_min`
         """
-        if n_x_seeds == 0:
-            return None, np.inf
-        x_seeds = space.random_sample(n_x_seeds, random_state=self.random_state)
+        continuous_dimensions = space.continuous_dimensions
+        continuous_bounds = space.bounds[continuous_dimensions]
+
+        if not continuous_dimensions.any():
+            min_acq = np.inf
+            x_min = np.array([np.nan] * space.bounds.shape[0])
+            return x_min, min_acq
 
         min_acq: float | None = None
         x_try: NDArray[Float]
         x_min: NDArray[Float]
         for x_try in x_seeds:
-            # Find the minimum of minus the acquisition function
-            res: OptimizeResult = minimize(acq, x_try, bounds=space.bounds, method="L-BFGS-B")
 
+            def continuous_acq(x: NDArray[Float], x_try=x_try) -> NDArray[Float]:
+                x_try[continuous_dimensions] = x
+                return acq(x_try)
+
+            # Find the minimum of minus the acquisition function
+            res: OptimizeResult = minimize(
+                continuous_acq, x_try[continuous_dimensions], bounds=continuous_bounds, method="L-BFGS-B"
+            )
             # See if success
             if not res.success:
                 continue
 
             # Store it if better than previous minimum(maximum).
             if min_acq is None or np.squeeze(res.fun) >= min_acq:
-                x_min = res.x
+                x_try[continuous_dimensions] = res.x
+                x_min = x_try
                 min_acq = np.squeeze(res.fun)
 
         if min_acq is None:
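
The core of this change is in `_l_bfgs_b_minimize`: L-BFGS-B now refines only the continuous dimensions of each seed point and leaves integer and categorical dimensions where the random-sampling stage placed them. A minimal standalone sketch of that idea, with a toy acquisition function, bounds, and mask made up for illustration (they are not the library's actual objects):

```python
import numpy as np
from scipy.optimize import minimize

# Toy acquisition over a 3-d point; dimension 1 stands in for a discrete
# (integer or categorical) parameter that the gradient optimizer must not move.
def acq(x):
    return float(np.sum((x - np.array([0.3, 2.0, -0.7])) ** 2))

bounds = np.array([[-1.0, 1.0], [0.0, 5.0], [-1.0, 1.0]])
continuous_dimensions = np.array([True, False, True])   # assumed mask
continuous_bounds = bounds[continuous_dimensions]

x_try = np.array([0.9, 3.0, 0.9])  # one seed produced by random sampling

def continuous_acq(x_cont):
    # Evaluate the full acquisition with the discrete entries held fixed.
    x_full = x_try.copy()
    x_full[continuous_dimensions] = x_cont
    return acq(x_full)

res = minimize(continuous_acq, x_try[continuous_dimensions],
               bounds=continuous_bounds, method="L-BFGS-B")

x_min = x_try.copy()
x_min[continuous_dimensions] = res.x
print(x_min)  # continuous entries move toward 0.3 and -0.7; the middle entry stays at 3.0
```

The sketch copies the seed for clarity; the diff above instead binds `x_try` as a default argument of the closure and writes the optimizer's result back into it.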

bayes_opt/bayesian_optimization.py

Lines changed: 8 additions & 6 deletions
@@ -14,6 +14,7 @@
 
 from bayes_opt import acquisition
 from bayes_opt.constraint import ConstraintModel
+from bayes_opt.domain_reduction import DomainTransformer
 from bayes_opt.event import DEFAULT_EVENTS, Events
 from bayes_opt.logger import _get_default_logger
 from bayes_opt.parameter import WrappedKernel
@@ -162,11 +163,10 @@ def __init__(
         self._verbose = verbose
         self._bounds_transformer = bounds_transformer
         if self._bounds_transformer:
-            try:
-                self._bounds_transformer.initialize(self._space)
-            except (AttributeError, TypeError) as exc:
-                error_msg = "The transformer must be an instance of DomainTransformer"
-                raise TypeError(error_msg) from exc
+            if not isinstance(self._bounds_transformer, DomainTransformer):
+                msg = "The transformer must be an instance of DomainTransformer"
+                raise TypeError(msg)
+            self._bounds_transformer.initialize(self._space)
 
         super().__init__(events=DEFAULT_EVENTS)
 
@@ -330,5 +330,7 @@ def set_bounds(self, new_bounds: BoundsMapping) -> None:
     def set_gp_params(self, **params: Any) -> None:
         """Set parameters of the internal Gaussian Process Regressor."""
         if "kernel" in params:
-            params["kernel"] = WrappedKernel(params["kernel"], self._space.kernel_transform)
+            params["kernel"] = WrappedKernel(
+                base_kernel=params["kernel"], transform=self._space.kernel_transform
+            )
         self._gp.set_params(**params)
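
The constructor now rejects an invalid `bounds_transformer` with an explicit `isinstance` check against `DomainTransformer`, instead of inferring the problem from an `AttributeError`/`TypeError` raised inside a duck-typed `initialize` call. A hedged usage sketch, assuming the usual `BayesianOptimization` constructor arguments:

```python
from bayes_opt import BayesianOptimization
from bayes_opt.domain_reduction import SequentialDomainReductionTransformer

# A proper DomainTransformer subclass is accepted and initialized as before.
optimizer = BayesianOptimization(
    f=lambda x, y: -x**2 - (y - 1) ** 2 + 1,
    pbounds={"x": (-2.0, 2.0), "y": (-3.0, 3.0)},
    bounds_transformer=SequentialDomainReductionTransformer(),
    random_state=1,
    verbose=0,
)

# Anything else is rejected up front with a clear TypeError.
try:
    BayesianOptimization(
        f=None,
        pbounds={"x": (-2.0, 2.0)},
        bounds_transformer="not a transformer",
        verbose=0,
    )
except TypeError as exc:
    print(exc)  # The transformer must be an instance of DomainTransformer
```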

bayes_opt/domain_reduction.py

Lines changed: 6 additions & 7 deletions
@@ -75,13 +75,7 @@ def __init__(
         self.gamma_pan = gamma_pan
         self.eta = eta
 
-        self.minimum_window_value: NDArray[Float] | Sequence[float] | float
-        if isinstance(minimum_window, Mapping):
-            self.minimum_window_value = [
-                item[1] for item in sorted(minimum_window.items(), key=lambda x: x[0])
-            ]
-        else:
-            self.minimum_window_value = minimum_window
+        self.minimum_window_value = minimum_window
 
     def initialize(self, target_space: TargetSpace) -> None:
         """Initialize all of the parameters.
@@ -91,6 +85,11 @@ def initialize(self, target_space: TargetSpace) -> None:
         target_space : TargetSpace
             TargetSpace this DomainTransformer operates on.
         """
+        if isinstance(self.minimum_window_value, Mapping):
+            self.minimum_window_value = [self.minimum_window_value[key] for key in target_space.keys]
+        else:
+            self.minimum_window_value = self.minimum_window_value
+
         any_not_float = any([not isinstance(p, FloatParameter) for p in target_space._params_config.values()])
         if any_not_float:
             msg = "Domain reduction is only supported for all-FloatParameter optimization."

bayes_opt/parameter.py

Lines changed: 21 additions & 0 deletions
@@ -59,6 +59,11 @@ def bounds(self) -> NDArray[Any]:
         """The bounds of the parameter in float space."""
         return self._bounds
 
+    @property
+    @abc.abstractmethod
+    def is_continuous(self) -> bool:
+        """Whether the parameter is continuous."""
+
     def random_sample(
         self, n_samples: int, random_state: np.random.RandomState | int | None
     ) -> NDArray[Float]:
@@ -161,6 +166,11 @@ class FloatParameter(BayesParameter):
     def __init__(self, name: str, bounds: tuple[float, float]) -> None:
         super().__init__(name, np.array(bounds))
 
+    @property
+    def is_continuous(self) -> bool:
+        """Whether the parameter is continuous."""
+        return True
+
     def to_float(self, value: float) -> float:
         """Convert a parameter value to a float.
 
@@ -248,6 +258,11 @@ class IntParameter(BayesParameter):
     def __init__(self, name: str, bounds: tuple[int, int]) -> None:
         super().__init__(name, np.array(bounds))
 
+    @property
+    def is_continuous(self) -> bool:
+        """Whether the parameter is continuous."""
+        return False
+
     def random_sample(
         self, n_samples: int, random_state: np.random.RandomState | int | None
     ) -> NDArray[Float]:
@@ -340,6 +355,11 @@ def __init__(self, name: str, categories: Sequence[Any]) -> None:
         bounds = np.vstack((lower, upper)).T
         super().__init__(name, bounds)
 
+    @property
+    def is_continuous(self) -> bool:
+        """Whether the parameter is continuous."""
+        return False
+
     def random_sample(
         self, n_samples: int, random_state: np.random.RandomState | int | None
     ) -> NDArray[Float]:
@@ -478,6 +498,7 @@ def __call__(self, X: NDArray[Float], Y: NDArray[Float] = None, eval_gradient: b
         K_gradient : ndarray of shape (n_samples_X, n_samples_X, n_dims)
         """
         X = self.transform(X)
+        Y = self.transform(Y) if Y is not None else None
         return self.base_kernel(X, Y, eval_gradient)
 
     def is_stationary(self):
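
The new `is_continuous` attribute is an abstract property on the base parameter class, overridden once per subclass, with only `FloatParameter` reporting `True`. A stripped-down sketch of that pattern (the real classes also carry names, bounds, and sampling logic):

```python
import abc

class Param(abc.ABC):
    @property
    @abc.abstractmethod
    def is_continuous(self) -> bool:
        """Whether the parameter is continuous."""

class FloatParam(Param):
    @property
    def is_continuous(self) -> bool:
        return True

class IntParam(Param):
    @property
    def is_continuous(self) -> bool:
        return False

class CategoricalParam(Param):
    @property
    def is_continuous(self) -> bool:
        return False

print([p.is_continuous for p in (FloatParam(), IntParam(), CategoricalParam())])
# [True, False, False]
```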

bayes_opt/target_space.py

Lines changed: 28 additions & 14 deletions
@@ -82,7 +82,7 @@ def __init__(
         self.target_func = target_func
 
         # Get the name of the parameters
-        self._keys: list[str] = sorted(pbounds)
+        self._keys: list[str] = list(pbounds.keys())
 
         self._params_config = self.make_params(pbounds)
         self._dim = sum([self._params_config[key].dim for key in self._keys])
@@ -180,6 +180,11 @@ def keys(self) -> list[str]:
         """
         return self._keys
 
+    @property
+    def params_config(self) -> dict[str, BayesParameter]:
+        """Get the parameters configuration."""
+        return self._params_config
+
     @property
     def bounds(self) -> NDArray[Float]:
         """Get the bounds of this TargetSpace.
@@ -210,6 +215,20 @@ def masks(self) -> dict[str, NDArray[np.bool_]]:
         """
         return self._masks
 
+    @property
+    def continuous_dimensions(self) -> NDArray[np.bool_]:
+        """Get the continuous parameters.
+
+        Returns
+        -------
+        dict
+        """
+        result = np.zeros(self.dim, dtype=bool)
+        masks = self.masks
+        for key in self.keys:
+            result[masks[key]] = self._params_config[key].is_continuous
+        return result
+
     def make_params(self, pbounds: BoundsMapping) -> dict[str, BayesParameter]:
         """Create a dictionary of parameters from a dictionary of bounds.
 
@@ -226,7 +245,7 @@ def make_params(self, pbounds: BoundsMapping) -> dict[str, BayesParameter]:
             parameter objects as values.
         """
         params: dict[str, BayesParameter] = {}
-        for key in sorted(pbounds):
+        for key in pbounds:
             pbound = pbounds[key]
 
             if isinstance(pbound, BayesParameter):
@@ -285,8 +304,7 @@ def params_to_array(self, params: Mapping[str, float | NDArray[Float]]) -> NDArr
         """
         if set(params) != set(self.keys):
             error_msg = (
-                f"Parameters' keys ({sorted(params)}) do "
-                f"not match the expected set of keys ({self.keys})."
+                f"Parameters' keys ({params}) do " f"not match the expected set of keys ({self.keys})."
             )
             raise ValueError(error_msg)
         return self._to_float(params)
@@ -337,9 +355,7 @@ def array_to_params(self, x: NDArray[Float]) -> dict[str, float | NDArray[Float]
 
     def _to_float(self, value: Mapping[str, float | NDArray[Float]]) -> NDArray[Float]:
         if set(value) != set(self.keys):
-            msg = (
-                f"Parameters' keys ({sorted(value)}) do " f"not match the expected set of keys ({self.keys})."
-            )
+            msg = f"Parameters' keys ({value}) do " f"not match the expected set of keys ({self.keys})."
             raise ValueError(msg)
         res = np.zeros(self._dim)
         for key in self._keys:
@@ -389,8 +405,7 @@ def _as_array(self, x: Any) -> NDArray[Float]:
         x = x.ravel()
         if x.size != self.dim:
             error_msg = (
-                f"Size of array ({len(x)}) is different than the "
-                f"expected number of parameters ({len(self.keys)})."
+                f"Size of array ({len(x)}) is different than the " f"expected number of ({len(self.dim)})."
             )
             raise ValueError(error_msg)
         return x
@@ -666,8 +681,7 @@ def set_bounds(self, new_bounds: BoundsMapping) -> None:
         new_bounds : dict
             A dictionary with the parameter name and its new bounds
         """
-        print(new_bounds)
-        new__params_config = self.make_params(new_bounds)
+        new_params_config = self.make_params(new_bounds)
 
         for key in self.keys:
             if key in new_bounds:
@@ -676,12 +690,12 @@ def set_bounds(self, new_bounds: BoundsMapping) -> None:
                 ) == set(new_bounds[key]):
                     msg = "Changing bounds of categorical parameters is not supported"
                     raise NotImplementedError(msg)
-                if not isinstance(new__params_config[key], type(self._params_config[key])):
+                if not isinstance(new_params_config[key], type(self._params_config[key])):
                     msg = (
-                        f"Parameter type {type(new__params_config[key])} of"
+                        f"Parameter type {type(new_params_config[key])} of"
                         " new bounds does not match parameter type"
                         f" {type(self._params_config[key])} of old bounds"
                     )
                     raise ValueError(msg)
-                self._params_config[key] = new__params_config[key]
+                self._params_config[key] = new_params_config[key]
         self._bounds = self.calculate_bounds()
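
The new `continuous_dimensions` property folds each parameter's `is_continuous` flag into a single boolean vector over the float representation, using the existing per-parameter masks. A self-contained sketch with made-up masks (the categorical parameter occupies two one-hot dimensions here):

```python
import numpy as np

dim = 4
masks = {
    "x":   np.array([True,  False, False, False]),  # float parameter
    "n":   np.array([False, True,  False, False]),  # int parameter
    "cat": np.array([False, False, True,  True]),   # one-hot categorical
}
is_continuous = {"x": True, "n": False, "cat": False}

result = np.zeros(dim, dtype=bool)
for key, mask in masks.items():
    result[mask] = is_continuous[key]

print(result)  # [ True False False False]
```

This is the mask `_l_bfgs_b_minimize` uses above to pick which entries of a seed point L-BFGS-B is allowed to move.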

examples/parameter_types.ipynb

Lines changed: 89 additions & 82 deletions
Large diffs are not rendered by default.

scripts/format.sh

Lines changed: 1 addition & 1 deletion
@@ -2,5 +2,5 @@
 set -ex
 
 poetry run ruff format bayes_opt tests
-poetry run ruff check bayes_opt tests --fix
+poetry run ruff check bayes_opt --fix

tests/test_acquisition.py

Lines changed: 1 addition & 1 deletion
@@ -114,7 +114,7 @@ def fun(x):
         except IndexError:
             return np.nan
 
-    _, min_acq_l = acq._l_bfgs_b_minimize(fun, space=target_space, n_x_seeds=1)
+    _, min_acq_l = acq._l_bfgs_b_minimize(fun, space=target_space, x_seeds=np.array([[2.5, 0.5]]))
     assert min_acq_l == np.inf

tests/test_bayesian_optimization.py

Lines changed: 4 additions & 4 deletions
@@ -38,7 +38,7 @@ def test_register():
     assert len(optimizer.res) == 1
     assert len(optimizer.space) == 1
 
-    optimizer.space.register(params={"p1": 5, "p2": 4}, target=9)
+    optimizer.space.register(params=np.array([5, 4]), target=9)
     assert len(optimizer.res) == 2
     assert len(optimizer.space) == 2
 
@@ -196,12 +196,12 @@ def test_set_bounds():
     # Ignore unknown keys
     optimizer.set_bounds({"other": (7, 8)})
     assert all(optimizer.space.bounds[:, 0] == np.array([0, 0, 0, 0]))
-    assert all(optimizer.space.bounds[:, 1] == np.array([1, 2, 3, 4]))
+    assert all(optimizer.space.bounds[:, 1] == np.array([1, 3, 2, 4]))
 
     # Update bounds accordingly
     optimizer.set_bounds({"p2": (1, 8)})
-    assert all(optimizer.space.bounds[:, 0] == np.array([0, 1, 0, 0]))
-    assert all(optimizer.space.bounds[:, 1] == np.array([1, 8, 3, 4]))
+    assert all(optimizer.space.bounds[:, 0] == np.array([0, 0, 1, 0]))
+    assert all(optimizer.space.bounds[:, 1] == np.array([1, 3, 8, 4]))
 
 
 def test_set_gp_params():
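
The updated assertions reflect that parameter keys now keep the insertion order of `pbounds` rather than a sorted order, so the bounds array follows declaration order. A quick way to see the resulting layout (the exact `pbounds` below is hypothetical, chosen to mirror the `[1, 3, 2, 4]` expectation in the test):

```python
from bayes_opt import BayesianOptimization

optimizer = BayesianOptimization(
    f=None,
    pbounds={"p1": (0, 1), "p3": (0, 3), "p2": (0, 2), "p4": (0, 4)},
    random_state=1,
    verbose=0,
)
print(optimizer.space.keys)          # ['p1', 'p3', 'p2', 'p4']
print(optimizer.space.bounds[:, 1])  # [1. 3. 2. 4.]
```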
