
Commit 6e4f33d

remove unnecessary attribute descriptions, move params_config to function argument
1 parent ef31faf commit 6e4f33d
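In short, this commit removes ScreenLogger's stored params_config (and its getter/setter) and instead passes the target space's configuration into each logging call. A minimal before/after sketch of the interface change, reconstructed from the diff below (space, keys, result, and current_max are illustrative stand-ins):

    # Before: the logger held the parameter configuration itself.
    logger = ScreenLogger(verbose=2, is_constrained=False, params_config=space._params_config)
    logger.log_optimization_step(keys, result, current_max)

    # After: the configuration travels with each call.
    logger = ScreenLogger(verbose=2, is_constrained=False)
    logger.log_optimization_step(keys, result, space.params_config, current_max)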

File tree

3 files changed (+39, -142 lines)


bayes_opt/bayesian_optimization.py

Lines changed: 7 additions & 28 deletions
@@ -76,27 +76,6 @@ class BayesianOptimization:
     This behavior may be desired in high noise situations where repeatedly probing
     the same point will give different answers. In other situations, the acquisition
     may occasionally generate a duplicate point.
-
-    Attributes
-    ----------
-    space : TargetSpace
-        The target space object containing the function to optimize.
-
-    acquisition_function : AcquisitionFunction
-        The acquisition function used to propose new points.
-
-    constraint : ConstraintModel or None
-        The constraint model, if any.
-
-    max : dict or None
-        Maximum target value observed and corresponding parameters.
-
-    res : list
-        All target values observed and corresponding parameters and timestamps.
-
-    logger : ScreenLogger
-        The logger used for displaying optimization progress.
-        You can customize the logger's properties (e.g., colors, verbosity, formatting).
     """
 
     def __init__(
@@ -167,11 +146,7 @@ def __init__(
         self._sorting_warning_already_shown = False  # TODO: remove in future version
 
         # Initialize logger
-        self.logger = ScreenLogger(
-            verbose=self._verbose,
-            is_constrained=self.is_constrained,
-            params_config=self._space._params_config,
-        )
+        self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained)
 
     @property
     def space(self) -> TargetSpace:
@@ -234,7 +209,9 @@ def register(
             warn(msg, stacklevel=1)
             self._sorting_warning_already_shown = True
         self._space.register(params, target, constraint_value)
-        self.logger.log_optimization_step(self._space.keys, self._space.res()[-1], self.max)
+        self.logger.log_optimization_step(
+            self._space.keys, self._space.res()[-1], self._space.params_config, self.max
+        )
 
     def probe(self, params: ParamsType, lazy: bool = True) -> None:
         """Evaluate the function at the given points.
@@ -266,7 +243,9 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
             self._queue.append(params)
         else:
             self._space.probe(params)
-            self.logger.log_optimization_step(self._space.keys, self._space.res()[-1], self.max)
+            self.logger.log_optimization_step(
+                self._space.keys, self._space.res()[-1], self._space.params_config, self.max
+            )
 
     def suggest(self) -> dict[str, float | NDArray[Float]]:
         """Suggest a promising point to probe next."""

bayes_opt/logger.py

Lines changed: 19 additions & 46 deletions
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 from collections.abc import Mapping
-from datetime import datetime
 from typing import TYPE_CHECKING, Any
 
 from colorama import Fore, just_fix_windows_console
@@ -33,15 +32,9 @@ class ScreenLogger:
     _colour_regular_message = Fore.RESET
     _colour_reset = Fore.RESET
 
-    def __init__(
-        self,
-        verbose: int = 2,
-        is_constrained: bool = False,
-        params_config: Mapping[str, ParameterConfig] | None = None,
-    ) -> None:
+    def __init__(self, verbose: int = 2, is_constrained: bool = False) -> None:
         self._verbose = verbose
         self._is_constrained = is_constrained
-        self._params_config = params_config
         self._header_length = None
         self._iterations = 0
         self._previous_max = None
@@ -70,22 +63,6 @@ def is_constrained(self) -> bool:
         """Return whether the logger is constrained."""
         return self._is_constrained
 
-    @property
-    def params_config(self) -> Mapping[str, ParameterConfig] | None:
-        """Return the parameter configuration used for formatting."""
-        return self._params_config
-
-    @params_config.setter
-    def params_config(self, config: Mapping[str, ParameterConfig]) -> None:
-        """Set the parameter configuration used for formatting.
-
-        Parameters
-        ----------
-        config : Mapping[str, ParameterConfig]
-            New parameter configuration.
-        """
-        self._params_config = config
-
     def _format_number(self, x: float) -> str:
         """Format a number.
 
@@ -142,7 +119,11 @@ def _format_str(self, str_: str) -> str:
         return s
 
     def _print_step(
-        self, result: dict[str, Any], keys: list[str], colour: str = _colour_regular_message
+        self,
+        result: dict[str, Any],
+        keys: list[str],
+        params_config: Mapping[str, ParameterConfig],
+        colour: str = _colour_regular_message,
     ) -> str:
         """Print a step.
 
@@ -154,6 +135,9 @@ def _print_step(
         keys : list[str]
             The parameter keys.
 
+        params_config : Mapping[str, ParameterConfig]
+            The configuration to map the key to the parameter for correct formatting.
+
         colour : str, optional
             Color to use for the output.
             (Default value = _colour_regular_message, equivalent to Fore.RESET)
@@ -162,10 +146,6 @@ def _print_step(
         -------
        A stringified, formatted version of the most recent optimization step.
         """
-        if self._params_config is None:
-            err_msg = "Parameter configuration is not set. Call set_params_config before logging."
-            raise ValueError(err_msg)
-
         # iter, target, allowed [, *params]
         cells: list[str | None] = [None] * (3 + len(keys))
 
@@ -174,7 +154,7 @@ def _print_step(
         cells[2] = self._format_bool(result["allowed"])
         params = result.get("params", {})
         cells[3:] = [
-            self._params_config[key].to_string(val, self._default_cell_size) for key, val in params.items()
+            params_config[key].to_string(val, self._default_cell_size) for key, val in params.items()
         ]
 
         return "| " + " | ".join(colour + x + self._colour_reset for x in cells if x is not None) + " |"
@@ -241,20 +221,6 @@ def _update_tracker(self, current_max: dict[str, Any] | None) -> None:
             self._previous_max = current_max["target"]
             self._previous_max_params = current_max["params"]
 
-    def _time_metrics(self) -> tuple[str, float, float]:
-        """Return time passed since last call."""
-        now = datetime.now()  # noqa: DTZ005
-        if self._start_time is None:
-            self._start_time = now
-        if self._previous_time is None:
-            self._previous_time = now
-
-        time_elapsed = now - self._start_time
-        time_delta = now - self._previous_time
-
-        self._previous_time = now
-        return (now.strftime("%Y-%m-%d %H:%M:%S"), time_elapsed.total_seconds(), time_delta.total_seconds())
-
     def log_optimization_start(self, keys: list[str]) -> None:
         """Log the start of the optimization process.
 
@@ -268,7 +234,11 @@ def log_optimization_start(self, keys: list[str]) -> None:
             print(line, end="")
 
     def log_optimization_step(
-        self, keys: list[str], result: dict[str, Any], current_max: dict[str, Any] | None
+        self,
+        keys: list[str],
+        result: dict[str, Any],
+        params_config: Mapping[str, ParameterConfig],
+        current_max: dict[str, Any] | None,
     ) -> None:
         """Log an optimization step.
 
@@ -280,6 +250,9 @@ def log_optimization_step(
         result : dict[str, Any]
             The result dictionary for the most recent step.
 
+        params_config : Mapping[str, ParameterConfig]
+            The configuration to map the key to the parameter for correct formatting.
+
         current_max : dict[str, Any] | None
             The current maximum target value and its parameters.
         """
@@ -288,7 +261,7 @@
 
         if self._verbose != 1 or is_new_max:
             colour = self._colour_new_max if is_new_max else self._colour_regular_message
-            line = self._print_step(result, keys, colour=colour) + "\n"
+            line = self._print_step(result, keys, params_config, colour=colour) + "\n"
             if self._verbose:
                 print(line, end="")
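Code that drives a ScreenLogger directly must now supply the configuration at call time, as the updated tests below do. A minimal sketch of that pattern (the target function and bounds are stand-ins; verbose=0 merely silences the optimizer's internal logger):

    from bayes_opt import BayesianOptimization
    from bayes_opt.logger import ScreenLogger

    def target_func(p1, p2):
        return -(p1 - 1.5) ** 2 - (p2 - 2.5) ** 2

    PBOUNDS = {"p1": (0, 10), "p2": (0, 10)}

    optimizer = BayesianOptimization(f=target_func, pbounds=PBOUNDS, random_state=1, verbose=0)
    optimizer.register(params={"p1": 1.5, "p2": 2.5}, target=4.0)

    # The parameter configuration now comes from the target space on each call.
    logger = ScreenLogger(verbose=2)
    logger.log_optimization_step(
        optimizer._space.keys, optimizer._space.res()[-1], optimizer._space.params_config, optimizer.max
    )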

tests/test_logger.py

Lines changed: 13 additions & 68 deletions
@@ -1,9 +1,8 @@
 from __future__ import annotations
 
 import io
-from unittest.mock import MagicMock, patch
+from unittest.mock import patch
 
-import pytest
 from colorama import Fore
 
 from bayes_opt import BayesianOptimization
@@ -52,44 +51,6 @@ def test_is_constrained_property():
     assert logger.is_constrained
 
 
-def test_params_config_property():
-    """Test the params_config property getter and setter."""
-    # Test the getter with default initialization (None)
-    logger = ScreenLogger()
-    assert logger.params_config is None
-
-    # Test initialization with a params_config
-    mock_config = {"param1": MagicMock(), "param2": MagicMock()}
-    logger_with_config = ScreenLogger(params_config=mock_config)
-    assert logger_with_config.params_config is mock_config
-
-    # Test the setter
-    new_config = {"param3": MagicMock(), "param4": MagicMock()}
-    logger.params_config = new_config
-    assert logger.params_config is new_config
-
-    # Test that the logger actually uses the params_config
-    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
-    logger.params_config = optimizer._space._params_config
-    optimizer.register(params={"p1": 1.5, "p2": 2.5}, target=4.0)
-
-    # This should not raise an error now that params_config is set
-    step_str = logger._print_step(optimizer._space.res()[-1], optimizer._space.keys)
-    assert "|" in step_str
-    assert "1" in step_str  # iteration
-    assert "4.0" in step_str  # target value
-
-
-def test_print_step_without_params_config():
-    """Test that _print_step raises an error when params_config is None."""
-    logger = ScreenLogger()
-    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
-    optimizer.register(params={"p1": 1.5, "p2": 2.5}, target=4.0)
-
-    with pytest.raises(ValueError, match="Parameter configuration is not set"):
-        logger._print_step(optimizer._space.res()[-1], optimizer._space.keys)
-
-
 def test_format_number():
     """Test the _format_number method."""
     logger = ScreenLogger()
@@ -155,22 +116,23 @@ def test_step():
     """Test the _print_step method."""
     optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
 
-    # Create a logger with the params_config from the optimizer
-    logger = ScreenLogger(params_config=optimizer._space._params_config)
+    logger = ScreenLogger()
 
     # Register a point so we have something to log
     optimizer.register(params={"p1": 1.5, "p2": 2.5}, target=4.0)
 
     # Test default color
-    step_str = logger._print_step(optimizer._space.res()[-1], optimizer._space.keys)
+    step_str = logger._print_step(
+        optimizer._space.res()[-1], optimizer._space.keys, optimizer._space.params_config
+    )
     assert "|" in step_str
     assert "1" in step_str  # iteration
     assert "4.0" in step_str  # target value
 
     # Test with custom color
     custom_color = Fore.RED
     step_str_colored = logger._print_step(
-        optimizer._space.res()[-1], optimizer._space.keys, colour=custom_color
+        optimizer._space.res()[-1], optimizer._space.keys, optimizer._space.params_config, colour=custom_color
     )
     assert custom_color in step_str_colored
 
@@ -252,26 +214,6 @@ def test_update_tracker():
     assert logger._previous_max_params == {"p1": 2, "p2": 2}  # Updated
 
 
-def test_time_metrics():
-    """Test the _time_metrics method."""
-    logger = ScreenLogger()
-
-    # First call initializes times
-    time_str, total_elapsed, delta = logger._time_metrics()
-    assert isinstance(time_str, str)
-    assert isinstance(total_elapsed, float)
-    assert isinstance(delta, float)
-    assert delta <= 0.1  # First call should have very small delta
-
-    # Subsequent call should show time difference
-    import time
-
-    time.sleep(0.01)  # Small delay to ensure time difference
-    time_str2, total_elapsed2, delta2 = logger._time_metrics()
-    assert total_elapsed2 > total_elapsed
-    assert delta2 > 0
-
-
 @patch("sys.stdout", new_callable=io.StringIO)
 def test_log_optimization_start(mock_stdout):
     """Test the log_optimization_start method."""
@@ -297,8 +239,7 @@ def test_log_optimization_step(mock_stdout):
     """Test the log_optimization_step method."""
     optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
 
-    # Create a logger with the params_config from the optimizer
-    logger = ScreenLogger(params_config=optimizer._space._params_config)
+    logger = ScreenLogger()
 
     # Create logger with verbose=1 specifically, as this is the only verbose level
     # that doesn't print for non-max points according to the implementation:
@@ -317,14 +258,18 @@ def test_log_optimization_step(mock_stdout):
     # For a point that is not a new max with verbose=1, should not print
     mock_stdout.truncate(0)
     mock_stdout.seek(0)
-    logger.log_optimization_step(optimizer._space.keys, optimizer._space.res()[-1], optimizer.max)
+    logger.log_optimization_step(
+        optimizer._space.keys, optimizer._space.res()[-1], optimizer._space.params_config, optimizer.max
+    )
     assert mock_stdout.getvalue() == ""  # Nothing printed for non-max point with verbose=1
 
     # Register a higher value, which should trigger output with verbose=1
     optimizer.register(params={"p1": 2, "p2": 2}, target=4)
     mock_stdout.truncate(0)
     mock_stdout.seek(0)
-    logger.log_optimization_step(optimizer._space.keys, optimizer._space.res()[-1], optimizer.max)
+    logger.log_optimization_step(
+        optimizer._space.keys, optimizer._space.res()[-1], optimizer._space.params_config, optimizer.max
+    )
     max_output = mock_stdout.getvalue()
     assert max_output != ""  # Something printed for new max point with verbose=1
     assert "4.0" in max_output  # Should show target value
