|
10 | 10 | from bayes_opt import BayesianOptimization, acquisition |
11 | 11 | from bayes_opt.acquisition import AcquisitionFunction |
12 | 12 | from bayes_opt.domain_reduction import SequentialDomainReductionTransformer |
13 | | -from bayes_opt.event import DEFAULT_EVENTS, Events |
14 | 13 | from bayes_opt.exception import NotUniqueError |
15 | 14 | from bayes_opt.logger import ScreenLogger |
16 | 15 | from bayes_opt.parameter import BayesParameter |
@@ -161,38 +160,6 @@ def test_prime_queue_with_register_and_init(): |
161 | 160 | assert len(optimizer.space) == 1 |
162 | 161 |
|
163 | 162 |
|
164 | | -def test_prime_subscriptions(): |
165 | | - optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1) |
166 | | - optimizer._prime_subscriptions() |
167 | | - |
168 | | - # Test that the default observer is correctly subscribed |
169 | | - for event in DEFAULT_EVENTS: |
170 | | - assert all([isinstance(k, ScreenLogger) for k in optimizer._events[event]]) |
171 | | - assert all([hasattr(k, "update") for k in optimizer._events[event]]) |
172 | | - |
173 | | - test_subscriber = "test_subscriber" |
174 | | - |
175 | | - def test_callback(event, instance): |
176 | | - pass |
177 | | - |
178 | | - optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1) |
179 | | - optimizer.subscribe(event=Events.OPTIMIZATION_START, subscriber=test_subscriber, callback=test_callback) |
180 | | - # Test that the desired observer is subscribed |
181 | | - assert all([k == test_subscriber for k in optimizer._events[Events.OPTIMIZATION_START]]) |
182 | | - assert all([v == test_callback for v in optimizer._events[Events.OPTIMIZATION_START].values()]) |
183 | | - |
184 | | - # Check that prime subscriptions won't overwrite manual subscriptions |
185 | | - optimizer._prime_subscriptions() |
186 | | - assert all([k == test_subscriber for k in optimizer._events[Events.OPTIMIZATION_START]]) |
187 | | - assert all([v == test_callback for v in optimizer._events[Events.OPTIMIZATION_START].values()]) |
188 | | - |
189 | | - assert optimizer._events[Events.OPTIMIZATION_STEP] == {} |
190 | | - assert optimizer._events[Events.OPTIMIZATION_END] == {} |
191 | | - |
192 | | - with pytest.raises(KeyError): |
193 | | - optimizer._events["other"] |
194 | | - |
195 | | - |
196 | 163 | def test_set_bounds(): |
197 | 164 | pbounds = {"p1": (0, 1), "p3": (0, 3), "p2": (0, 2), "p4": (0, 4)} |
198 | 165 | optimizer = BayesianOptimization(target_func, pbounds, random_state=1) |
@@ -223,56 +190,27 @@ def test_set_gp_params(): |
223 | 190 |
|
224 | 191 |
|
225 | 192 | def test_maximize(): |
226 | | - class Tracker: |
227 | | - def __init__(self): |
228 | | - self.start_count = 0 |
229 | | - self.step_count = 0 |
230 | | - self.end_count = 0 |
231 | | - |
232 | | - def update_start(self, event, instance): |
233 | | - self.start_count += 1 |
234 | | - |
235 | | - def update_step(self, event, instance): |
236 | | - self.step_count += 1 |
237 | | - |
238 | | - def update_end(self, event, instance): |
239 | | - self.end_count += 1 |
240 | | - |
241 | | - def reset(self): |
242 | | - self.__init__() |
243 | | - |
244 | 193 | acq = acquisition.UpperConfidenceBound() |
245 | 194 | optimizer = BayesianOptimization( |
246 | 195 | target_func, PBOUNDS, acq, random_state=np.random.RandomState(1), allow_duplicate_points=True |
247 | 196 | ) |
248 | 197 |
|
249 | | - tracker = Tracker() |
250 | | - optimizer.subscribe(event=Events.OPTIMIZATION_START, subscriber=tracker, callback=tracker.update_start) |
251 | | - optimizer.subscribe(event=Events.OPTIMIZATION_STEP, subscriber=tracker, callback=tracker.update_step) |
252 | | - optimizer.subscribe(event=Events.OPTIMIZATION_END, subscriber=tracker, callback=tracker.update_end) |
253 | | - |
| 198 | + # Test initial maximize with zero init_points and zero n_iter
254 | 199 | optimizer.maximize(init_points=0, n_iter=0) |
255 | 200 | assert not optimizer._queue |
256 | | - assert len(optimizer.space) == 1 |
257 | | - assert tracker.start_count == 1 |
258 | | - assert tracker.step_count == 1 |
259 | | - assert tracker.end_count == 1 |
| 201 | + assert len(optimizer.space) == 1 # Even with no init_points, we should have at least one point |
260 | 202 |
|
| 203 | + # Test after setting GP parameters |
261 | 204 | optimizer.set_gp_params(alpha=1e-2) |
262 | 205 | optimizer.maximize(init_points=2, n_iter=0) |
263 | 206 | assert not optimizer._queue |
264 | | - assert len(optimizer.space) == 3 |
| 207 | + assert len(optimizer.space) == 3 # Previously had 1, add 2 more from init_points |
265 | 208 | assert optimizer._gp.alpha == 1e-2 |
266 | | - assert tracker.start_count == 2 |
267 | | - assert tracker.step_count == 3 |
268 | | - assert tracker.end_count == 2 |
269 | 209 |
|
| 210 | + # Test with additional iterations |
270 | 211 | optimizer.maximize(init_points=0, n_iter=2) |
271 | 212 | assert not optimizer._queue |
272 | | - assert len(optimizer.space) == 5 |
273 | | - assert tracker.start_count == 3 |
274 | | - assert tracker.step_count == 5 |
275 | | - assert tracker.end_count == 3 |
| 213 | + assert len(optimizer.space) == 5 # Previously had 3, add 2 more from n_iter |
276 | 214 |
|
277 | 215 |
|
278 | 216 | def test_define_wrong_transformer(): |
|
0 commit comments