Skip to content

Commit 05add75

Browse files
committed
remove observable pattern, jsonlogger, and any use of events
1 parent 626aafe commit 05add75

File tree

6 files changed

+95
-274
lines changed

6 files changed

+95
-274
lines changed

bayes_opt/__init__.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,10 @@
55
import importlib.metadata
66

77
from bayes_opt import acquisition
8-
from bayes_opt.bayesian_optimization import BayesianOptimization, Events
8+
from bayes_opt.bayesian_optimization import BayesianOptimization
99
from bayes_opt.constraint import ConstraintModel
1010
from bayes_opt.domain_reduction import SequentialDomainReductionTransformer
11-
from bayes_opt.logger import JSONLogger, ScreenLogger
11+
from bayes_opt.logger import ScreenLogger
1212
from bayes_opt.target_space import TargetSpace
1313

1414
__version__ = importlib.metadata.version("bayesian-optimization")
@@ -19,8 +19,6 @@
1919
"BayesianOptimization",
2020
"TargetSpace",
2121
"ConstraintModel",
22-
"Events",
2322
"ScreenLogger",
24-
"JSONLogger",
2523
"SequentialDomainReductionTransformer",
2624
]

bayes_opt/bayesian_optimization.py

Lines changed: 35 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,13 @@
2121
from bayes_opt import acquisition
2222
from bayes_opt.constraint import ConstraintModel
2323
from bayes_opt.domain_reduction import DomainTransformer
24-
from bayes_opt.event import DEFAULT_EVENTS, Events
25-
from bayes_opt.logger import _get_default_logger
24+
from bayes_opt.logger import ScreenLogger
2625
from bayes_opt.parameter import wrap_kernel
2726
from bayes_opt.target_space import TargetSpace
2827
from bayes_opt.util import ensure_rng
2928

3029
if TYPE_CHECKING:
31-
from collections.abc import Callable, Iterable, Mapping
30+
from collections.abc import Callable, Mapping
3231

3332
from numpy.random import RandomState
3433
from numpy.typing import NDArray
@@ -41,35 +40,7 @@
4140
Float = np.floating[Any]
4241

4342

44-
class Observable:
45-
"""Inspired by https://www.protechtraining.com/blog/post/879#simple-observer."""
46-
47-
def __init__(self, events: Iterable[Any]) -> None:
48-
# maps event names to subscribers
49-
# str -> dict
50-
self._events = {event: dict() for event in events}
51-
52-
def get_subscribers(self, event: Any) -> Any:
53-
"""Return the subscribers of an event."""
54-
return self._events[event]
55-
56-
def subscribe(self, event: Any, subscriber: Any, callback: Callable[..., Any] | None = None) -> None:
57-
"""Add subscriber to an event."""
58-
if callback is None:
59-
callback = subscriber.update
60-
self.get_subscribers(event)[subscriber] = callback
61-
62-
def unsubscribe(self, event: Any, subscriber: Any) -> None:
63-
"""Remove a subscriber for a particular event."""
64-
del self.get_subscribers(event)[subscriber]
65-
66-
def dispatch(self, event: Any) -> None:
67-
"""Trigger callbacks for subscribers of an event."""
68-
for callback in self.get_subscribers(event).values():
69-
callback(event, self)
70-
71-
72-
class BayesianOptimization(Observable):
43+
class BayesianOptimization:
7344
"""Handle optimization of a target function over a specific target space.
7445
7546
This class takes the function to optimize as well as the parameters bounds
@@ -105,6 +76,27 @@ class BayesianOptimization(Observable):
10576
This behavior may be desired in high noise situations where repeatedly probing
10677
the same point will give different answers. In other situations, the acquisition
10778
may occasionally generate a duplicate point.
79+
80+
Attributes
81+
----------
82+
space : TargetSpace
83+
The target space object containing the function to optimize.
84+
85+
acquisition_function : AcquisitionFunction
86+
The acquisition function used to propose new points.
87+
88+
constraint : ConstraintModel or None
89+
The constraint model, if any.
90+
91+
max : dict or None
92+
Maximum target value observed and corresponding parameters.
93+
94+
res : list
95+
All target values observed and corresponding parameters and timestamps.
96+
97+
logger : ScreenLogger
98+
The logger used for displaying optimization progress.
99+
You can customize the logger's properties (e.g., colors, verbosity, formatting).
108100
"""
109101

110102
def __init__(
@@ -173,7 +165,9 @@ def __init__(
173165
self._bounds_transformer.initialize(self._space)
174166

175167
self._sorting_warning_already_shown = False # TODO: remove in future version
176-
super().__init__(events=DEFAULT_EVENTS)
168+
169+
# Initialize logger
170+
self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained)
177171

178172
@property
179173
def space(self) -> TargetSpace:
@@ -236,7 +230,7 @@ def register(
236230
warn(msg, stacklevel=1)
237231
self._sorting_warning_already_shown = True
238232
self._space.register(params, target, constraint_value)
239-
self.dispatch(Events.OPTIMIZATION_STEP)
233+
self.logger.log_optimization_step(self)
240234

241235
def probe(self, params: ParamsType, lazy: bool = True) -> None:
242236
"""Evaluate the function at the given points.
@@ -268,7 +262,7 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
268262
self._queue.append(params)
269263
else:
270264
self._space.probe(params)
271-
self.dispatch(Events.OPTIMIZATION_STEP)
265+
self.logger.log_optimization_step(self)
272266

273267
def suggest(self) -> dict[str, float | NDArray[Float]]:
274268
"""Suggest a promising point to probe next."""
@@ -295,13 +289,6 @@ def _prime_queue(self, init_points: int) -> None:
295289
sample = self._space.random_sample(random_state=self._random_state)
296290
self._queue.append(self._space.array_to_params(sample))
297291

298-
def _prime_subscriptions(self) -> None:
299-
if not any([len(subs) for subs in self._events.values()]):
300-
_logger = _get_default_logger(self._verbose, self.is_constrained)
301-
self.subscribe(Events.OPTIMIZATION_START, _logger)
302-
self.subscribe(Events.OPTIMIZATION_STEP, _logger)
303-
self.subscribe(Events.OPTIMIZATION_END, _logger)
304-
305292
def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
306293
r"""
307294
Maximize the given function over the target space.
@@ -324,8 +311,10 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
324311
optimization routine, make sure to fit it manually, e.g. by calling
325312
``optimizer._gp.fit(optimizer.space.params, optimizer.space.target)``.
326313
"""
327-
self._prime_subscriptions()
328-
self.dispatch(Events.OPTIMIZATION_START)
314+
# Log optimization start
315+
self.logger.log_optimization_start(self)
316+
317+
# Prime the queue with random points
329318
self._prime_queue(init_points)
330319

331320
iteration = 0
@@ -342,7 +331,8 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
342331
# the init_points points (only for the true iterations)
343332
self.set_bounds(self._bounds_transformer.transform(self._space))
344333

345-
self.dispatch(Events.OPTIMIZATION_END)
334+
# Log optimization end
335+
self.logger.log_optimization_end(self)
346336

347337
def set_bounds(self, new_bounds: BoundsMapping) -> None:
348338
"""Modify the bounds of the search space.

bayes_opt/event.py

Lines changed: 0 additions & 17 deletions
This file was deleted.

bayes_opt/logger.py

Lines changed: 58 additions & 91 deletions
Original file line numberDiff line numberDiff line change
@@ -2,48 +2,19 @@
22

33
from __future__ import annotations
44

5-
import json
6-
from contextlib import suppress
7-
from pathlib import Path
5+
from datetime import datetime
86
from typing import TYPE_CHECKING, Any
97

108
import numpy as np
119
from colorama import Fore, just_fix_windows_console
1210

13-
from bayes_opt.event import Events
14-
from bayes_opt.observer import _Tracker
15-
1611
if TYPE_CHECKING:
17-
from os import PathLike
18-
1912
from bayes_opt.bayesian_optimization import BayesianOptimization
2013

2114
just_fix_windows_console()
2215

2316

24-
def _get_default_logger(verbose: int, is_constrained: bool) -> ScreenLogger:
25-
"""
26-
Return the default logger.
27-
28-
Parameters
29-
----------
30-
verbose : int
31-
Verbosity level of the logger.
32-
33-
is_constrained : bool
34-
Whether the underlying optimizer uses constraints (this requires
35-
an additional column in the output).
36-
37-
Returns
38-
-------
39-
ScreenLogger
40-
The default logger.
41-
42-
"""
43-
return ScreenLogger(verbose=verbose, is_constrained=is_constrained)
44-
45-
46-
class ScreenLogger(_Tracker):
17+
class ScreenLogger:
4718
"""Logger that outputs text, e.g. to log to a terminal.
4819
4920
Parameters
@@ -66,7 +37,11 @@ def __init__(self, verbose: int = 2, is_constrained: bool = False) -> None:
6637
self._verbose = verbose
6738
self._is_constrained = is_constrained
6839
self._header_length = None
69-
super().__init__()
40+
self._iterations = 0
41+
self._previous_max = None
42+
self._previous_max_params = None
43+
self._start_time = None
44+
self._previous_time = None
7045

7146
@property
7247
def verbose(self) -> int:
@@ -221,84 +196,76 @@ def _is_new_max(self, instance: BayesianOptimization) -> bool:
221196
self._previous_max = instance.max["target"]
222197
return instance.max["target"] > self._previous_max
223198

224-
def update(self, event: str, instance: BayesianOptimization) -> None:
225-
"""Handle incoming events.
199+
def _update_tracker(self, instance: BayesianOptimization) -> None:
200+
"""Update the tracker.
226201
227202
Parameters
228203
----------
229-
event : str
230-
One of the values associated with `Events.OPTIMIZATION_START`,
231-
`Events.OPTIMIZATION_STEP` or `Events.OPTIMIZATION_END`.
232-
233204
instance : bayesian_optimization.BayesianOptimization
234205
The instance associated with the step.
235206
"""
236-
line = ""
237-
if event == Events.OPTIMIZATION_START:
238-
line = self._header(instance) + "\n"
239-
elif event == Events.OPTIMIZATION_STEP:
240-
is_new_max = self._is_new_max(instance)
241-
if self._verbose != 1 or is_new_max:
242-
colour = self._colour_new_max if is_new_max else self._colour_regular_message
243-
line = self._step(instance, colour=colour) + "\n"
244-
elif event == Events.OPTIMIZATION_END:
245-
line = "=" * self._header_length + "\n"
207+
self._iterations += 1
246208

247-
if self._verbose:
248-
print(line, end="")
249-
self._update_tracker(event, instance)
250-
251-
252-
class JSONLogger(_Tracker):
253-
"""
254-
Logger that outputs steps in JSON format.
209+
if instance.max is None:
210+
return
255211

256-
The resulting file can be used to restart the optimization from an earlier state.
212+
current_max = instance.max
257213

258-
Parameters
259-
----------
260-
path : str or os.PathLike
261-
Path to the file to write to.
214+
if self._previous_max is None or current_max["target"] > self._previous_max:
215+
self._previous_max = current_max["target"]
216+
self._previous_max_params = current_max["params"]
262217

263-
reset : bool
264-
Whether to overwrite the file if it already exists.
218+
def _time_metrics(self) -> tuple[str, float, float]:
219+
"""Return time passed since last call."""
220+
now = datetime.now() # noqa: DTZ005
221+
if self._start_time is None:
222+
self._start_time = now
223+
if self._previous_time is None:
224+
self._previous_time = now
265225

266-
"""
226+
time_elapsed = now - self._start_time
227+
time_delta = now - self._previous_time
267228

268-
def __init__(self, path: str | PathLike[str], reset: bool = True):
269-
self._path = Path(path)
270-
if reset:
271-
with suppress(OSError):
272-
self._path.unlink(missing_ok=True)
273-
super().__init__()
229+
self._previous_time = now
230+
return (now.strftime("%Y-%m-%d %H:%M:%S"), time_elapsed.total_seconds(), time_delta.total_seconds())
274231

275-
def update(self, event: str, instance: BayesianOptimization) -> None:
276-
"""
277-
Handle incoming events.
232+
def log_optimization_start(self, instance: BayesianOptimization) -> None:
233+
"""Log the start of the optimization process.
278234
279235
Parameters
280236
----------
281-
event : str
282-
One of the values associated with `Events.OPTIMIZATION_START`,
283-
`Events.OPTIMIZATION_STEP` or `Events.OPTIMIZATION_END`.
284-
285-
instance : bayesian_optimization.BayesianOptimization
286-
The instance associated with the step.
287-
237+
instance : BayesianOptimization
238+
The instance associated with the event.
288239
"""
289-
if event == Events.OPTIMIZATION_STEP:
290-
data = dict(instance.res[-1])
240+
if self._verbose:
241+
line = self._header(instance) + "\n"
242+
print(line, end="")
291243

292-
now, time_elapsed, time_delta = self._time_metrics()
293-
data["datetime"] = {"datetime": now, "elapsed": time_elapsed, "delta": time_delta}
244+
def log_optimization_step(self, instance: BayesianOptimization) -> None:
245+
"""Log an optimization step.
294246
295-
if "allowed" in data: # fix: github.com/fmfn/BayesianOptimization/issues/361
296-
data["allowed"] = bool(data["allowed"])
247+
Parameters
248+
----------
249+
instance : BayesianOptimization
250+
The instance associated with the event.
251+
"""
252+
is_new_max = self._is_new_max(instance)
253+
self._update_tracker(instance)
297254

298-
if "constraint" in data and isinstance(data["constraint"], np.ndarray):
299-
data["constraint"] = data["constraint"].tolist()
255+
if self._verbose != 1 or is_new_max:
256+
colour = self._colour_new_max if is_new_max else self._colour_regular_message
257+
line = self._step(instance, colour=colour) + "\n"
258+
if self._verbose:
259+
print(line, end="")
300260

301-
with self._path.open("a") as f:
302-
f.write(json.dumps(data) + "\n")
261+
def log_optimization_end(self, instance: BayesianOptimization) -> None:
262+
"""Log the end of the optimization process.
303263
304-
self._update_tracker(event, instance)
264+
Parameters
265+
----------
266+
instance : BayesianOptimization
267+
The instance associated with the event.
268+
"""
269+
if self._verbose and self._header_length is not None:
270+
line = "=" * self._header_length + "\n"
271+
print(line, end="")

0 commit comments

Comments (0)