
Commit 3c4c298

Update parameters, testing
1 parent f17c96a commit 3c4c298

File tree

6 files changed: +295 -141 lines changed


bayes_opt/bayesian_optimization.py

Lines changed: 4 additions & 2 deletions
@@ -16,7 +16,7 @@
 from bayes_opt.constraint import ConstraintModel
 from bayes_opt.event import DEFAULT_EVENTS, Events
 from bayes_opt.logger import _get_default_logger
-from bayes_opt.parameter import wrap_kernel
+from bayes_opt.parameter import WrappedKernel
 from bayes_opt.target_space import TargetSpace
 from bayes_opt.util import ensure_rng
 

@@ -152,7 +152,7 @@ def __init__(
 
         # Internal GP regressor
         self._gp = GaussianProcessRegressor(
-            kernel=wrap_kernel(Matern(nu=2.5), transform=self._space.kernel_transform),
+            kernel=WrappedKernel(Matern(nu=2.5), transform=self._space.kernel_transform),
             alpha=1e-6,
             normalize_y=True,
             n_restarts_optimizer=5,

@@ -329,4 +329,6 @@ def set_bounds(self, new_bounds: BoundsMapping) -> None:
 
     def set_gp_params(self, **params: Any) -> None:
         """Set parameters of the internal Gaussian Process Regressor."""
+        if "kernel" in params:
+            params["kernel"] = WrappedKernel(params["kernel"], self._space.kernel_transform)
         self._gp.set_params(**params)
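
Why this matters: before this change, calling set_gp_params(kernel=...) handed a bare kernel to the regressor, silently dropping the parameter transform. A minimal usage sketch (not part of the diff; it assumes only the public bayes_opt API shown in this commit):

from sklearn.gaussian_process.kernels import RBF

from bayes_opt import BayesianOptimization

optimizer = BayesianOptimization(
    f=lambda x: -(x**2),
    pbounds={"x": (-2, 2)},
    random_state=1,
)
# The custom kernel is wrapped in WrappedKernel internally, so the space's
# kernel_transform is still applied before the kernel sees any input.
optimizer.set_gp_params(kernel=RBF(length_scale=1.0))
optimizer.maximize(init_points=2, n_iter=3)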

bayes_opt/constraint.py

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@
 from sklearn.gaussian_process import GaussianProcessRegressor
 from sklearn.gaussian_process.kernels import Matern
 
-from bayes_opt.parameter import wrap_kernel
+from bayes_opt.parameter import WrappedKernel
 
 if TYPE_CHECKING:
     from collections.abc import Callable

@@ -71,7 +71,7 @@ def __init__(
 
         self._model = [
             GaussianProcessRegressor(
-                kernel=wrap_kernel(Matern(nu=2.5), transform) if transform is not None else Matern(nu=2.5),
+                kernel=WrappedKernel(Matern(nu=2.5), transform) if transform is not None else Matern(nu=2.5),
                 alpha=1e-6,
                 normalize_y=True,
                 n_restarts_optimizer=5,
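
For context, ConstraintModel's GPs are normally reached through the constrained-optimization entry point rather than constructed directly. A hedged sketch (the target and constraint functions here are illustrative, not from this commit):

import numpy as np
from scipy.optimize import NonlinearConstraint

from bayes_opt import BayesianOptimization


def target(x, y):
    return -(x**2) - (y - 1) ** 2 + 1


def constraint_function(x, y):
    return np.cos(x) * np.cos(y)


# Require constraint_function(x, y) <= 0.5; the internal ConstraintModel
# fits one GaussianProcessRegressor per constraint output.
constraint = NonlinearConstraint(constraint_function, -np.inf, 0.5)

optimizer = BayesianOptimization(
    f=target,
    constraint=constraint,
    pbounds={"x": (-2, 2), "y": (-2, 2)},
    random_state=1,
)
optimizer.maximize(init_points=2, n_iter=5)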

bayes_opt/parameter.py

Lines changed: 51 additions & 38 deletions
@@ -4,7 +4,6 @@
 
 import abc
 from collections.abc import Sequence
-from inspect import signature
 from numbers import Number
 from typing import TYPE_CHECKING, Any, Callable, Union
 

@@ -375,8 +374,6 @@ def to_float(self, value: Any) -> NDArray[Float]:
         """
         res = np.zeros(len(self.categories))
         one_hot_index = [i for i, val in enumerate(self.categories) if val == value]
-        if len(one_hot_index) != 1:
-            raise ValueError
         res[one_hot_index] = 1
         return res.astype(float)
 

@@ -432,7 +429,7 @@ def kernel_transform(self, value: NDArray[Float]) -> NDArray[Float]:
         """
         value = np.atleast_2d(value)
         res = np.zeros(value.shape)
-        res[np.argmax(value, axis=0)] = 1
+        res[:, np.argmax(value, axis=1)] = 1
         return res
 
     @property

@@ -441,52 +438,68 @@ def dim(self) -> int:
         return len(self.categories)
 
 
-def wrap_kernel(kernel: kernels.Kernel, transform: Callable[[Any], Any]) -> kernels.Kernel:
-    """Wrap a kernel to transform input data before passing it to the kernel.
+class WrappedKernel(kernels.Kernel):
+    """Wrap a kernel with a parameter transformation.
+
+    The transform function is applied to the input before passing it to the base kernel.
 
     Parameters
     ----------
-    kernel : kernels.Kernel
-        The kernel to wrap.
+    base_kernel : kernels.Kernel
 
-    transform : Callable
-        The transformation function to apply to the input data.
+    transform : Callable[[Any], Any]
+    """
 
-    Returns
-    -------
-    kernels.Kernel
-        The wrapped kernel.
+    def __init__(self, base_kernel: kernels.Kernel, transform: Callable[[Any], Any]) -> None:
+        super().__init__()
+        self.base_kernel = base_kernel
+        self.transform = transform
 
-    Notes
-    -----
-    See https://arxiv.org/abs/1805.03463 for more information.
-    """
-    kernel_type = type(kernel)
+    def __call__(self, X: NDArray[Float], Y: NDArray[Float] = None, eval_gradient: bool = False) -> Any:
+        """Return the kernel k(X, Y) and optionally its gradient after applying the transform.
 
-    class WrappedKernel(kernel_type):
-        @copy_signature(getattr(kernel_type.__init__, "deprecated_original", kernel_type.__init__))
-        def __init__(self, **kwargs: Any) -> None:
-            super().__init__(**kwargs)
+        For details, see the documentation of the base kernel.
 
-        def __call__(self, X: Any, Y: Any = None, eval_gradient: bool = False) -> Any:
-            X = transform(X)
-            return super().__call__(X, Y, eval_gradient)
+        Parameters
+        ----------
+        X : ndarray of shape (n_samples_X, n_features)
+            Left argument of the returned kernel k(X, Y).
 
-        def __reduce__(self) -> str | tuple[Any, ...]:
-            return (wrap_kernel, (kernel, transform))
+        Y : ndarray of shape (n_samples_Y, n_features), default=None
+            Right argument of the returned kernel k(X, Y). If None, k(X, X) is evaluated.
 
-    return WrappedKernel(**kernel.get_params())
+        eval_gradient : bool, default=False
+            Determines whether the gradient with respect to the kernel hyperparameter is calculated.
 
+        Returns
+        -------
+        K : ndarray of shape (n_samples_X, n_samples_Y)
 
-def copy_signature(source_fct: Callable[..., Any]) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
-    """Clones a signature from a source function to a target function.
+        K_gradient : ndarray of shape (n_samples_X, n_samples_X, n_dims)
+        """
+        X = self.transform(X)
+        return self.base_kernel(X, Y, eval_gradient)
 
-    via
-    https://stackoverflow.com/a/58989918/
-    """
+    def is_stationary(self):
+        """Return whether the kernel is stationary."""
+        return self.base_kernel.is_stationary()
 
-    def copy(target_fct: Callable[..., Any]) -> Callable[..., Any]:
-        target_fct.__signature__ = signature(source_fct)
-        return target_fct
+    def diag(self, X: NDArray[Float]) -> NDArray[Float]:
+        """Return the diagonal of k(X, X).
 
-    return copy
+        This method allows for more efficient calculations than calling
+        np.diag(self(X)).
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples,)
+            Left argument of the returned kernel k(X, Y)
+
+        Returns
+        -------
+        K_diag : ndarray of shape (n_samples_X,)
+            Diagonal of kernel k(X, X)
+        """
+        X = self.transform(X)
+        return self.base_kernel.diag(X)
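
The class-based WrappedKernel replaces the old wrap_kernel closure, which built a dynamic subclass of the wrapped kernel's type and needed the copy_signature helper plus a custom __reduce__ to stay picklable. A minimal sketch of the new behavior, with np.round standing in for TargetSpace.kernel_transform:

import numpy as np
from sklearn.gaussian_process.kernels import Matern

from bayes_opt.parameter import WrappedKernel

kernel = WrappedKernel(Matern(nu=2.5), transform=np.round)

X = np.array([[0.2], [0.9], [1.1]])
# Inputs are rounded before Matern sees them, so 0.9 and 1.1 both become 1.0
# and are treated as identical points (kernel value 1 with each other).
print(kernel(X))
print(kernel.diag(X))  # diagonal of k(X, X), also computed on rounded inputs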

examples/parameter_types.ipynb

Lines changed: 188 additions & 97 deletions
Large diffs are not rendered by default.

examples/typed_hyperparameter_tuning.py

Lines changed: 0 additions & 2 deletions
@@ -49,15 +49,13 @@ def gboost(log_learning_rate, max_depth, min_samples_split):
 
 continuous_optimizer = BayesianOptimization(
     f=gboost,
-    acquisition_function=acquisition.ExpectedImprovement(1e-1),
    pbounds=continuous_pbounds,
     verbose=0,
     random_state=42,
 )
 
 discrete_optimizer = BayesianOptimization(
     f=gboost,
-    acquisition_function=acquisition.ExpectedImprovement(1e-1),
     pbounds=discrete_pbounds,
     verbose=0,
     random_state=42,
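
With the explicit acquisition function removed, both optimizers fall back to the library default. If the old behavior is wanted, it can still be requested explicitly; a sketch reusing gboost and continuous_pbounds from the surrounding example:

from bayes_opt import BayesianOptimization, acquisition

continuous_optimizer = BayesianOptimization(
    f=gboost,
    acquisition_function=acquisition.ExpectedImprovement(1e-1),
    pbounds=continuous_pbounds,
    verbose=0,
    random_state=42,
)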

tests/test_parameter.py

Lines changed: 50 additions & 0 deletions
@@ -1,7 +1,9 @@
 from __future__ import annotations
 
 import numpy as np
+import pytest
 
+from bayes_opt import BayesianOptimization
 from bayes_opt.parameter import CategoricalParameter, FloatParameter, IntParameter
 from bayes_opt.target_space import TargetSpace
 

@@ -38,6 +40,11 @@ def target_func(**kwargs):
 
     assert (space.target == np.array([target1, target2])).all()
 
+    p1 = space._params_config["p1"]
+    assert p1.to_float(0.2) == 0.2
+    assert p1.to_float(np.array(2.3)) == 2.3
+    assert p1.to_float(3) == 3.0
+
 
 def test_int_parameters():
     def target_func(**kwargs):

@@ -68,6 +75,15 @@ def target_func(**kwargs):
 
     assert (space.target == np.array([target1, target2])).all()
 
+    p1 = space._params_config["p1"]
+    assert p1.to_float(0) == 0.0
+    assert p1.to_float(np.array(2)) == 2.0
+    assert p1.to_float(3) == 3.0
+
+    assert p1.kernel_transform(0) == 0.0
+    assert p1.kernel_transform(2.3) == 2.0
+    assert p1.kernel_transform(np.array([1.3, 3.6, 7.2])) == pytest.approx(np.array([1, 4, 7]))
+
 
 def test_cat_parameters():
     fruit_ratings = {"apple": 1.0, "banana": 2.0, "mango": 5.0, "honeydew melon": -10.0, "strawberry": np.pi}

@@ -102,6 +118,23 @@ def target_func(fruit: str):
 
     assert (space.target == np.array([target1, target2])).all()
 
+    p1 = space._params_config["fruit"]
+    for i, fruit in enumerate(fruits):
+        assert (p1.to_float(fruit) == np.eye(5)[i]).all()
+
+    assert (p1.kernel_transform(np.array([0.8, 0.2, 0.3, 0.5, 0.78])) == np.array([1, 0, 0, 0, 0])).all()
+    assert (p1.kernel_transform(np.array([0.78, 0.2, 0.3, 0.5, 0.8])) == np.array([0, 0, 0, 0, 1.0])).all()
+
+
+def test_cateogrical_valid_bounds():
+    pbounds = {"fruit": ("apple", "banana", "mango", "honeydew melon", "banana", "strawberry")}
+    with pytest.raises(ValueError):
+        TargetSpace(None, pbounds)
+
+    pbounds = {"fruit": ("apple",)}
+    with pytest.raises(ValueError):
+        TargetSpace(None, pbounds)
+
 
 def test_to_string():
     pbounds = {"p1": (0, 1), "p2": (1, 2)}

@@ -133,3 +166,20 @@ def test_to_string():
     assert space._params_config["fruit"].to_string("mango", 5) == "mango"
     assert space._params_config["fruit"].to_string("honeydew melon", 10) == "honeyde..."
     assert space._params_config["fruit"].to_string("strawberry", 10) == "strawberry"
+
+
+def test_integration_mixed_optimization():
+    fruit_ratings = {"apple": 1.0, "banana": 2.0, "mango": 5.0, "honeydew melon": -10.0, "strawberry": np.pi}
+
+    pbounds = {
+        "p1": (0, 1),
+        "p2": (1, 2),
+        "p3": (-1, 3, int),
+        "fruit": ("apple", "banana", "mango", "honeydew melon", "strawberry"),
+    }
+
+    def target_func(p1, p2, p3, fruit):
+        return p1 + p2 + p3 + fruit_ratings[fruit]
+
+    optimizer = BayesianOptimization(target_func, pbounds)
+    optimizer.maximize(init_points=2, n_iter=10)
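
The integration test above exercises the typed pbounds syntax used throughout these tests: a 2-tuple of numbers declares a continuous float parameter, a 3-tuple whose last element is int declares an integer parameter, and a tuple of strings declares a categorical parameter. The same shapes in isolation:

pbounds = {
    "p1": (0, 1),  # float parameter on [0, 1]
    "p3": (-1, 3, int),  # integer parameter on [-1, 3]
    "fruit": ("apple", "banana", "mango"),  # categorical over the listed values
}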
