
Commit 39d1d42

Update, fix and expand tests
This commit updates old tests, fixes broken ones, and adds new ones as well. NOTE: test_acceptance.py is commented out since it takes a while to run. However, it is arguably the most important test of all.
1 parent 09158d7 commit 39d1d42

12 files changed: 659 additions, 346 deletions
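
Since test_acceptance.py ships commented out (see the commit message above), one way to keep the fast tests in the default run while still being able to opt in to the slow acceptance tests is sketched below. This runner script is an assumption for illustration, not part of this commit, and the --acceptance flag is hypothetical.

# Hypothetical runner, not part of this commit: skip the slow acceptance
# tests by default, include them when "--acceptance" is passed.
import sys
import pytest

if __name__ == "__main__":
    args = ["tests/", "--ignore=tests/test_acceptance.py"]
    if "--acceptance" in sys.argv:
        # Collect everything, including tests/test_acceptance.py
        # (which would also need to be uncommented first).
        args = ["tests/"]
    sys.exit(pytest.main(args))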

bayes_opt/bayesian_optimization.py

Lines changed: 9 additions & 6 deletions
@@ -28,12 +28,15 @@ def __next__(self):
         self._queue = self._queue[1:]
         return obj
 
+    def next(self):
+        return self.__next__()
+
     def add(self, obj):
         """Add object to end of queue."""
         self._queue.append(obj)
 
 
-class Observable:
+class Observable(object):
     """
 
     Inspired/Taken from
@@ -147,11 +150,11 @@ def _prime_subscriptions(self):
         self.subscribe(Events.OPTMIZATION_END, _logger)
 
     def maximize(self,
-                 init_points: int=5,
-                 n_iter: int=25,
-                 acq: str='ucb',
-                 kappa: float=2.576,
-                 xi: float=0.0,
+                 init_points=5,
+                 n_iter=25,
+                 acq='ucb',
+                 kappa=2.576,
+                 xi=0.0,
                  **gp_params):
         """Mazimize your function"""
         self._prime_subscriptions()
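
The next()/__next__() alias and the explicit object base class above look like Python 2 compatibility touches (the same pattern appears in observer.py below): Python 2's iterator protocol calls next() while Python 3 calls __next__(), and only classes deriving from object are new-style under Python 2. A minimal standalone sketch of the pattern, not the repository's actual Queue class:

# Standalone illustration only; Countdown is a made-up class.
class Countdown(object):       # inheriting from object: new-style in Python 2
    def __init__(self, n):
        self._n = n

    def __iter__(self):
        return self

    def __next__(self):        # Python 3 iterator protocol
        if self._n <= 0:
            raise StopIteration
        self._n -= 1
        return self._n

    next = __next__            # Python 2 iterator protocol calls next()

print(list(Countdown(3)))      # [2, 1, 0] under either interpreter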

bayes_opt/observer.py

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 """
 observers...
 """
+from __future__ import print_function
 import os
 import json
 from datetime import datetime
@@ -14,7 +15,7 @@ def update(self, event, instance):
         raise NotImplementedError
 
 
-class _Tracker:
+class _Tracker(object):
     def __init__(self):
         self._iterations = 0
 

bayes_opt/target_space.py

Lines changed: 12 additions & 6 deletions
@@ -1,5 +1,5 @@
 import numpy as np
-from .util import ensure_rng, unique_rows
+from .util import ensure_rng
 
 
 def _hashable(x):
@@ -22,7 +22,7 @@ class TargetSpace(object):
     >>> y = space.register_point(x)
     >>> assert self.max_point()['max_val'] == y
     """
-    def __init__(self, target_func, pbounds: dict, random_state=None):
+    def __init__(self, target_func, pbounds, random_state=None):
         """
         Parameters
         ----------
@@ -112,9 +112,15 @@ def _as_array(self, x):
             x = np.asarray(x, dtype=float)
         except TypeError:
             x = self.params_to_array(x)
-        finally:
-            x = x.ravel()
-        assert x.size == self.dim, 'x must have the same dimensions'
+
+        x = x.ravel()
+        try:
+            assert x.size == self.dim
+        except AssertionError:
+            raise ValueError(
+                "Size of array ({}) is different than the ".format(len(x)) +
+                "expected number of parameters ({}).".format(len(self.keys))
+            )
         return x
 
     def register(self, params, target):
@@ -243,6 +249,6 @@ def set_bounds(self, new_bounds):
         new_bounds : dict
             A dictionary with the parameter name and its new bounds
         """
-        for row, key in enumerate(self._keys):
+        for row, key in enumerate(self.keys):
             if key in new_bounds:
                 self._bounds[row] = new_bounds[key]
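
With this change, _as_array raises a descriptive ValueError instead of a bare AssertionError when a point has the wrong number of parameters. A rough usage sketch, with a made-up objective, parameter names, and bounds:

# Illustrative only: the objective, parameter names, and bounds are invented.
from bayes_opt.target_space import TargetSpace

space = TargetSpace(target_func=lambda x, y: -x ** 2 - y ** 2,
                    pbounds={"x": (-1, 1), "y": (-1, 1)})

space._as_array({"x": 0.5, "y": -0.5})   # OK: two parameters, as expected
try:
    space._as_array([0.5, -0.5, 1.0])    # three values for a 2-parameter space
except ValueError as exc:
    print(exc)  # Size of array (3) is different than the expected number of parameters (2).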

bayes_opt/util.py

Lines changed: 24 additions & 24 deletions
@@ -156,30 +156,6 @@ def load_logs(optimizer, logs):
     return optimizer
 
 
-def unique_rows(a):
-    """
-    A function to trim repeated rows that may appear when optimizing.
-    This is necessary to avoid the sklearn GP object from breaking
-
-    :param a: array to trim repeated rows from
-
-    :return: mask of unique rows
-    """
-    if a.size == 0:
-        return np.empty((0,))
-
-    # Sort array and kep track of where things should go back to
-    order = np.lexsort(a.T)
-    reorder = np.argsort(order)
-
-    a = a[order]
-    diff = np.diff(a, axis=0)
-    ui = np.ones(len(a), 'bool')
-    ui[1:] = (diff != 0).any(axis=1)
-
-    return ui[reorder]
-
-
 def ensure_rng(random_state=None):
     """
     Creates a random number generator based on an optional seed. This can be
@@ -262,3 +238,27 @@ def underline(cls, s):
     def yellow(cls, s):
         """Wrap text in yellow."""
         return cls._wrap_colour(s, cls.YELLOW)
+
+
+# def unique_rows(a):
+#     """
+#     A function to trim repeated rows that may appear when optimizing.
+#     This is necessary to avoid the sklearn GP object from breaking
+
+#     :param a: array to trim repeated rows from
+
+#     :return: mask of unique rows
+#     """
+#     if a.size == 0:
+#         return np.empty((0,))
+
+#     # Sort array and kep track of where things should go back to
+#     order = np.lexsort(a.T)
+#     reorder = np.argsort(order)
+
+#     a = a[order]
+#     diff = np.diff(a, axis=0)
+#     ui = np.ones(len(a), 'bool')
+#     ui[1:] = (diff != 0).any(axis=1)
+
+#     return ui[reorder]
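
For reference, the now-retired unique_rows returned a boolean mask marking the first occurrence of each row. A self-contained copy of the same logic (kept separate from the package, purely to show its behaviour):

import numpy as np

def unique_rows(a):
    """Mask of rows that are not duplicates of an earlier row."""
    if a.size == 0:
        return np.empty((0,))
    order = np.lexsort(a.T)            # sort rows, remembering how to undo it
    reorder = np.argsort(order)
    a = a[order]
    diff = np.diff(a, axis=0)
    ui = np.ones(len(a), 'bool')
    ui[1:] = (diff != 0).any(axis=1)   # unique if it differs from its predecessor
    return ui[reorder]

pts = np.array([[0., 1.], [2., 3.], [0., 1.]])
print(unique_rows(pts))                # [ True  True False]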

tests/test_acceptance.py

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+# import numpy as np
+
+# from bayes_opt import BayesianOptimization
+# from bayes_opt.util import ensure_rng
+
+
+# def test_simple_optimization():
+#     """
+#     ...
+#     """
+#     def f(x, y):
+#         return -x ** 2 - (y - 1) ** 2 + 1
+
+
+#     optimizer = BayesianOptimization(
+#         f=f,
+#         pbounds={"x": (-3, 3), "y": (-3, 3)},
+#         random_state=12356,
+#         verbose=0,
+#     )
+
+#     optimizer.maximize(init_points=0, n_iter=25)
+
+#     max_target = optimizer.max["target"]
+#     max_x = optimizer.max["params"]["x"]
+#     max_y = optimizer.max["params"]["y"]
+
+#     assert (1 - max_target) < 1e-3
+#     assert np.abs(max_x - 0) < 1e-1
+#     assert np.abs(max_y - 1) < 1e-1
+
+
+# def test_intermediate_optimization():
+#     """
+#     ...
+#     """
+#     def f(x, y, z):
+#         x_factor = np.exp(-(x - 2) ** 2) + (1 / (x ** 2 + 1))
+#         y_factor = np.exp(-(y - 6) ** 2 / 10)
+#         z_factor = (1 + 0.2 * np.cos(z)) / (1 + z ** 2)
+#         return (x_factor + y_factor) * z_factor
+
+#     optimizer = BayesianOptimization(
+#         f=f,
+#         pbounds={"x": (-7, 7), "y": (-7, 7), "z": (-7, 7)},
+#         random_state=56,
+#         verbose=0,
+#     )
+
+#     optimizer.maximize(init_points=0, n_iter=150)
+
+#     max_target = optimizer.max["target"]
+#     max_x = optimizer.max["params"]["x"]
+#     max_y = optimizer.max["params"]["y"]
+#     max_z = optimizer.max["params"]["z"]
+
+#     assert (2.640 - max_target) < 0
+#     assert np.abs(2 - max_x) < 1e-1
+#     assert np.abs(6 - max_y) < 1e-1
+#     assert np.abs(0 - max_z) < 1e-1
+
+
+# if __name__ == '__main__':
+#     r"""
+#     CommandLine:
+#         python tests/test_bayesian_optimization.py
+#     """
+#     import pytest
+#     pytest.main([__file__])
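
The assertions in test_simple_optimization encode the analytic optimum of the toy objective: -x ** 2 - (y - 1) ** 2 + 1 reaches its maximum of 1 at (x, y) = (0, 1), which is what the tolerances check once the optimizer has converged. A quick standalone sanity check of that claim:

# Standalone check of the analytic optimum assumed by test_simple_optimization.
def f(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1

print(f(0, 1))      # 1, the maximum -> matches `assert (1 - max_target) < 1e-3`
print(f(0.1, 1.1))  # about 0.98, strictly below the maximum away from (0, 1)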
