Skip to content

Commit 4476271

Browse files
committed
Merge branch 'master' into parameter-types
2 parents: 31223a9 + 58c9061 · commit 4476271

File tree

2 files changed: +30 additions, −6 deletions

bayes_opt/bayesian_optimization.py

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@
88

99
from collections import deque
1010
from typing import TYPE_CHECKING, Any
11+
from warnings import warn
1112

13+
import numpy as np
1214
from sklearn.gaussian_process import GaussianProcessRegressor
1315
from sklearn.gaussian_process.kernels import Matern
1416

@@ -24,7 +26,6 @@
2426
if TYPE_CHECKING:
2527
from collections.abc import Callable, Iterable, Mapping
2628

27-
import numpy as np
2829
from numpy.random import RandomState
2930
from numpy.typing import NDArray
3031
from scipy.optimize import NonlinearConstraint
@@ -168,6 +169,7 @@ def __init__(
168169
raise TypeError(msg)
169170
self._bounds_transformer.initialize(self._space)
170171

172+
self._sorting_warning_already_shown = False # TODO: remove in future version
171173
super().__init__(events=DEFAULT_EVENTS)
172174

173175
@property
@@ -219,6 +221,17 @@ def register(
219221
constraint_value: float or None
220222
Value of the constraint function at the observation, if any.
221223
"""
224+
# TODO: remove in future version
225+
if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
226+
msg = (
227+
"You're attempting to register an np.ndarray. Currently, the optimizer internally sorts"
228+
" parameters by key and expects any registered array to respect this order. In future"
229+
" versions this behaviour will change and the order as given by the pbounds dictionary"
230+
" will be used. If you wish to retain sorted parameters, please manually sort your pbounds"
231+
" dictionary before constructing the optimizer."
232+
)
233+
warn(msg, stacklevel=1)
234+
self._sorting_warning_already_shown = True
222235
self._space.register(params, target, constraint_value)
223236
self.dispatch(Events.OPTIMIZATION_STEP)
224237

@@ -236,6 +249,18 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
236249
If True, the optimizer will evaluate the points when calling
237250
maximize(). Otherwise it will evaluate it at the moment.
238251
"""
252+
# TODO: remove in future version
253+
if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
254+
msg = (
255+
"You're attempting to register an np.ndarray. Currently, the optimizer internally sorts"
256+
" parameters by key and expects any registered array to respect this order. In future"
257+
" versions this behaviour will change and the order as given by the pbounds dictionary"
258+
" will be used. If you wish to retain sorted parameters, please manually sort your pbounds"
259+
" dictionary before constructing the optimizer."
260+
)
261+
warn(msg, stacklevel=1)
262+
self._sorting_warning_already_shown = True
263+
params = self._space.array_to_params(params)
239264
if lazy:
240265
self._queue.append(params)
241266
else:
@@ -264,9 +289,8 @@ def _prime_queue(self, init_points: int) -> None:
264289
init_points = max(init_points, 1)
265290

266291
for _ in range(init_points):
267-
self._queue.append(
268-
self._space.array_to_params(self._space.random_sample(random_state=self._random_state))
269-
)
292+
sample = self._space.random_sample(random_state=self._random_state)
293+
self._queue.append(self._space.array_to_params(sample))
270294

271295
def _prime_subscriptions(self) -> None:
272296
if not any([len(subs) for subs in self._events.values()]):

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "bayesian-optimization"
3-
version = "1.5.1"
3+
version = "2.0.2"
44
description = "Bayesian Optimization package"
55
authors = ["Fernando Nogueira"]
66
license = "MIT"
@@ -52,4 +52,4 @@ build-backend = "poetry.core.masonry.api"
5252
exclude_lines = [
5353
"pragma: no cover",
5454
"if TYPE_CHECKING:",
55-
]
55+
]

0 commit comments

Comments (0)