
Commit aa7a350

pass in logger function arguments separately
1 parent c870288 commit aa7a350

File tree

5 files changed: +162 -130 lines changed

bayes_opt/bayesian_optimization.py

Lines changed: 9 additions & 5 deletions

@@ -167,7 +167,11 @@ def __init__(
         self._sorting_warning_already_shown = False  # TODO: remove in future version

         # Initialize logger
-        self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained)
+        self.logger = ScreenLogger(
+            verbose=self._verbose,
+            is_constrained=self.is_constrained,
+            params_config=self._space._params_config,
+        )

     @property
     def space(self) -> TargetSpace:
@@ -230,7 +234,7 @@ def register(
             warn(msg, stacklevel=1)
             self._sorting_warning_already_shown = True
         self._space.register(params, target, constraint_value)
-        self.logger.log_optimization_step(self)
+        self.logger.log_optimization_step(self._space.keys, self._space.res()[-1], self.max)

     def probe(self, params: ParamsType, lazy: bool = True) -> None:
         """Evaluate the function at the given points.
@@ -262,7 +266,7 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
             self._queue.append(params)
         else:
             self._space.probe(params)
-            self.logger.log_optimization_step(self)
+            self.logger.log_optimization_step(self._space.keys, self._space.res()[-1], self.max)

     def suggest(self) -> dict[str, float | NDArray[Float]]:
         """Suggest a promising point to probe next."""
@@ -312,7 +316,7 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
         ``optimizer._gp.fit(optimizer.space.params, optimizer.space.target)``.
         """
         # Log optimization start
-        self.logger.log_optimization_start(self)
+        self.logger.log_optimization_start(self._space.keys)

         # Prime the queue with random points
         self._prime_queue(init_points)
@@ -332,7 +336,7 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
                 self.set_bounds(self._bounds_transformer.transform(self._space))

         # Log optimization end
-        self.logger.log_optimization_end(self)
+        self.logger.log_optimization_end()

     def set_bounds(self, new_bounds: BoundsMapping) -> None:
         """Modify the bounds of the search space.

bayes_opt/logger.py

Lines changed: 77 additions & 48 deletions

@@ -2,13 +2,14 @@

 from __future__ import annotations

+from collections.abc import Mapping
 from datetime import datetime
 from typing import TYPE_CHECKING, Any

 from colorama import Fore, just_fix_windows_console

 if TYPE_CHECKING:
-    from bayes_opt.bayesian_optimization import BayesianOptimization
+    from bayes_opt.parameter import ParameterConfig

 just_fix_windows_console()
@@ -32,9 +33,15 @@ class ScreenLogger:
     _colour_regular_message = Fore.RESET
     _colour_reset = Fore.RESET

-    def __init__(self, verbose: int = 2, is_constrained: bool = False) -> None:
+    def __init__(
+        self,
+        verbose: int = 2,
+        is_constrained: bool = False,
+        params_config: Mapping[str, ParameterConfig] | None = None,
+    ) -> None:
         self._verbose = verbose
         self._is_constrained = is_constrained
+        self._params_config = params_config
         self._header_length = None
         self._iterations = 0
         self._previous_max = None
@@ -63,6 +70,22 @@ def is_constrained(self) -> bool:
         """Return whether the logger is constrained."""
         return self._is_constrained

+    @property
+    def params_config(self) -> Mapping[str, ParameterConfig] | None:
+        """Return the parameter configuration used for formatting."""
+        return self._params_config
+
+    @params_config.setter
+    def params_config(self, config: Mapping[str, ParameterConfig]) -> None:
+        """Set the parameter configuration used for formatting.
+
+        Parameters
+        ----------
+        config : Mapping[str, ParameterConfig]
+            New parameter configuration.
+        """
+        self._params_config = config
+
     def _format_number(self, x: float) -> str:
         """Format a number.
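Because `params_config` is optional at construction time, a logger can now be created first and wired to a search space later through the setter added above. A small sketch of that workflow (the bounds are illustrative; note that, as in the diff itself, the configuration comes from the target space's private `_params_config` attribute):

```python
from bayes_opt import BayesianOptimization
from bayes_opt.logger import ScreenLogger

optimizer = BayesianOptimization(f=None, pbounds={"x": (-2.0, 2.0)})

logger = ScreenLogger(verbose=2)                       # built without a config
logger.params_config = optimizer.space._params_config  # attached afterwards
```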
@@ -118,50 +141,56 @@ def _format_str(self, str_: str) -> str:
             return s[: self._default_cell_size - 3] + "..."
         return s

-    def _step(self, instance: BayesianOptimization, colour: str = _colour_regular_message) -> str:
-        """Log a step.
+    def _print_step(
+        self, result: dict[str, Any], keys: list[str], colour: str = _colour_regular_message
+    ) -> str:
+        """Print a step.

         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the event.
+        result : dict[str, Any]
+            The result dictionary for the most recent step.

-        colour :
+        keys : list[str]
+            The parameter keys.
+
+        colour : str, optional
+            Color to use for the output.
             (Default value = _colour_regular_message, equivalent to Fore.RESET)

         Returns
         -------
         A stringified, formatted version of the most recent optimization step.
         """
-        res: dict[str, Any] = instance.res[-1]
-        keys: list[str] = instance.space.keys
+        if self._params_config is None:
+            err_msg = "Parameter configuration is not set. Call set_params_config before logging."
+            raise ValueError(err_msg)
+
         # iter, target, allowed [, *params]
         cells: list[str | None] = [None] * (3 + len(keys))

-        cells[:2] = self._format_number(self._iterations + 1), self._format_number(res["target"])
+        cells[:2] = self._format_number(self._iterations + 1), self._format_number(result["target"])
         if self._is_constrained:
-            cells[2] = self._format_bool(res["allowed"])
-        params = res.get("params", {})
+            cells[2] = self._format_bool(result["allowed"])
+        params = result.get("params", {})
         cells[3:] = [
-            instance.space._params_config[key].to_string(val, self._default_cell_size)
-            for key, val in params.items()
+            self._params_config[key].to_string(val, self._default_cell_size) for key, val in params.items()
         ]

         return "| " + " | ".join(colour + x + self._colour_reset for x in cells if x is not None) + " |"

-    def _header(self, instance: BayesianOptimization) -> str:
+    def _print_header(self, keys: list[str]) -> str:
         """Print the header of the log.

         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the header.
+        keys : list[str]
+            The parameter keys.

         Returns
         -------
         A stringified, formatted version of the most header.
         """
-        keys: list[str] = instance.space.keys
         # iter, target, allowed [, *params]
         cells: list[str | None] = [None] * (3 + len(keys))
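The new `ValueError` guard is a behavioral change worth noting: a logger that has no `params_config` now fails loudly on the first logged step rather than reaching into an optimizer instance. A runnable sketch, with the result dict shape assumed to mirror a `TargetSpace.res()` entry:

```python
from bayes_opt.logger import ScreenLogger

logger = ScreenLogger(verbose=2)  # params_config deliberately left unset

try:
    logger.log_optimization_step(
        keys=["x"],
        result={"target": 1.0, "params": {"x": 0.5}},
        current_max=None,
    )
except ValueError as exc:
    print(exc)  # "Parameter configuration is not set. ..."
```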

@@ -174,42 +203,40 @@ def _header(self, instance: BayesianOptimization) -> str:
         self._header_length = len(line)
         return line + "\n" + ("-" * self._header_length)

-    def _is_new_max(self, instance: BayesianOptimization) -> bool:
+    def _is_new_max(self, current_max: dict[str, Any] | None) -> bool:
         """Check if the step to log produced a new maximum.

         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the step.
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.

         Returns
         -------
         boolean
         """
-        if instance.max is None:
+        if current_max is None:
             # During constrained optimization, there might not be a maximum
             # value since the optimizer might've not encountered any points
             # that fulfill the constraints.
             return False
         if self._previous_max is None:
-            self._previous_max = instance.max["target"]
-        return instance.max["target"] > self._previous_max
+            self._previous_max = current_max["target"]
+        return current_max["target"] > self._previous_max

-    def _update_tracker(self, instance: BayesianOptimization) -> None:
+    def _update_tracker(self, current_max: dict[str, Any] | None) -> None:
         """Update the tracker.

         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the step.
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.
         """
         self._iterations += 1

-        if instance.max is None:
+        if current_max is None:
             return

-        current_max = instance.max
-
         if self._previous_max is None or current_max["target"] > self._previous_max:
             self._previous_max = current_max["target"]
             self._previous_max_params = current_max["params"]
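One subtlety the refactor preserves: the first feasible maximum only seeds `_previous_max`, so it is not reported as a new max. A short sketch against the private helper, with dict shapes assumed:

```python
from bayes_opt.logger import ScreenLogger

logger = ScreenLogger(verbose=0)
print(logger._is_new_max(None))                           # False: no feasible point yet
print(logger._is_new_max({"target": 0.3, "params": {}}))  # False: first value seeds the tracker
print(logger._is_new_max({"target": 0.7, "params": {}}))  # True: 0.7 beats the seeded 0.3
```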
@@ -228,43 +255,45 @@ def _time_metrics(self) -> tuple[str, float, float]:
         self._previous_time = now
         return (now.strftime("%Y-%m-%d %H:%M:%S"), time_elapsed.total_seconds(), time_delta.total_seconds())

-    def log_optimization_start(self, instance: BayesianOptimization) -> None:
+    def log_optimization_start(self, keys: list[str]) -> None:
         """Log the start of the optimization process.

         Parameters
         ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
+        keys : list[str]
+            The parameter keys.
         """
         if self._verbose:
-            line = self._header(instance) + "\n"
+            line = self._print_header(keys) + "\n"
             print(line, end="")

-    def log_optimization_step(self, instance: BayesianOptimization) -> None:
+    def log_optimization_step(
+        self, keys: list[str], result: dict[str, Any], current_max: dict[str, Any] | None
+    ) -> None:
         """Log an optimization step.

         Parameters
         ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
+        keys : list[str]
+            The parameter keys.
+
+        result : dict[str, Any]
+            The result dictionary for the most recent step.
+
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.
         """
-        is_new_max = self._is_new_max(instance)
-        self._update_tracker(instance)
+        is_new_max = self._is_new_max(current_max)
+        self._update_tracker(current_max)

         if self._verbose != 1 or is_new_max:
             colour = self._colour_new_max if is_new_max else self._colour_regular_message
-            line = self._step(instance, colour=colour) + "\n"
+            line = self._print_step(result, keys, colour=colour) + "\n"
             if self._verbose:
                 print(line, end="")

-    def log_optimization_end(self, instance: BayesianOptimization) -> None:
-        """Log the end of the optimization process.
-
-        Parameters
-        ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
-        """
+    def log_optimization_end(self) -> None:
+        """Log the end of the optimization process."""
         if self._verbose and self._header_length is not None:
             line = "=" * self._header_length + "\n"
             print(line, end="")
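With the `instance` parameter gone from every public method, the logger can be driven end to end with plain data. A hedged sketch; `FloatParameter` is assumed to be one of the concrete parameter types behind `ParameterConfig` in `bayes_opt.parameter`, and the dict shapes mirror `TargetSpace.res()` entries and `BayesianOptimization.max`:

```python
from bayes_opt.logger import ScreenLogger
from bayes_opt.parameter import FloatParameter  # assumed concrete parameter type

keys = ["x"]
config = {"x": FloatParameter(name="x", bounds=(-2.0, 2.0))}

logger = ScreenLogger(verbose=2, params_config=config)
logger.log_optimization_start(keys)

step = {"target": 0.84, "params": {"x": 0.4}}
logger.log_optimization_step(keys, step, current_max=step)
logger.log_optimization_end()
```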

examples/parameter_types.ipynb

Lines changed: 20 additions & 20 deletions
Large diffs are not rendered by default.
