Skip to content

Commit c870288

Browse files
committed
add tests for the screen logger class
1 parent 4484e1b commit c870288

File tree

1 file changed

+324
-0
lines changed

1 file changed

+324
-0
lines changed

tests/test_logger.py

Lines changed: 324 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,324 @@
from __future__ import annotations

import io
import time
from unittest.mock import patch

from colorama import Fore

from bayes_opt import BayesianOptimization
from bayes_opt.logger import ScreenLogger
12+
def target_func(**kwargs):
    """Toy objective used by every test: the sum of all parameter values."""
    values = kwargs.values()
    return sum(values)
15+
16+
17+
# Shared search space for all tests: two parameters, each bounded to [0, 10].
PBOUNDS = {"p1": (0, 10), "p2": (0, 10)}
18+
19+
20+
def test_initialization():
    """Constructor defaults and explicit arguments are reflected by the properties."""
    # Default construction: verbose level 2, unconstrained.
    default_logger = ScreenLogger()
    assert default_logger.verbose == 2
    assert not default_logger.is_constrained

    # Explicit arguments override both settings.
    custom_logger = ScreenLogger(verbose=0, is_constrained=True)
    assert custom_logger.verbose == 0
    assert custom_logger.is_constrained
31+
32+
33+
def test_verbose_property():
    """The verbose setter stores each assigned level and the getter returns it."""
    logger = ScreenLogger(verbose=1)
    assert logger.verbose == 1

    # Reassigning the property must round-trip each new level.
    for level in (0, 2):
        logger.verbose = level
        assert logger.verbose == level
43+
44+
45+
def test_is_constrained_property():
    """The is_constrained getter mirrors the flag passed at construction."""
    assert not ScreenLogger(is_constrained=False).is_constrained
    assert ScreenLogger(is_constrained=True).is_constrained
52+
53+
54+
def test_format_number():
    """_format_number pads or truncates ints and floats to one fixed-width cell."""
    logger = ScreenLogger()
    cell = logger._default_cell_size

    # An integer is padded to exactly one cell.
    assert len(logger._format_number(42)) == cell

    # A float keeps its leading digits at the default precision.
    formatted_float = logger._format_number(3.14159)
    assert len(formatted_float) == cell
    assert "3.14" in formatted_float  # default precision is 4

    # An integer too wide for the cell is truncated with an ellipsis.
    formatted_long_int = logger._format_number(12345678901234)
    assert len(formatted_long_int) == cell
    assert "..." in formatted_long_int

    # An overlong float is likewise squeezed into a single cell.
    assert len(logger._format_number(1234.5678901234)) == cell
76+
77+
78+
def test_format_bool():
    """_format_bool renders booleans into fixed-width cells, abbreviating if needed."""
    logger = ScreenLogger()
    cell = logger._default_cell_size

    # Both labels fit the default cell and are padded to its exact width.
    for flag, label in ((True, "True"), (False, "False")):
        rendered = logger._format_bool(flag)
        assert len(rendered) == cell
        assert label in rendered

    # A cell too narrow for the word keeps only the first letter.
    narrow_logger = ScreenLogger()
    narrow_logger._default_cell_size = 3
    assert narrow_logger._format_bool(True) == "T "
    assert narrow_logger._format_bool(False) == "F "
97+
98+
99+
def test_format_str():
    """_format_str pads short strings and truncates long ones with an ellipsis."""
    logger = ScreenLogger()
    cell = logger._default_cell_size

    # A short string is padded out to the cell width.
    padded = logger._format_str("test")
    assert len(padded) == cell
    assert "test" in padded

    # An overlong string is cut down and marked with "...".
    truncated = logger._format_str("this_is_a_very_long_string_that_should_be_truncated")
    assert len(truncated) == cell
    assert "..." in truncated
113+
114+
115+
def test_step():
    """_step renders the latest observation as a table row, optionally colored."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
    logger = ScreenLogger()

    # One registered observation gives the row something to display.
    optimizer.register(params={"p1": 1.5, "p2": 2.5}, target=4.0)

    # Default color: plain row with column separators, iteration, and target.
    row = logger._step(optimizer)
    assert "|" in row
    assert "1" in row  # iteration counter
    assert "4.0" in row  # target value

    # An explicitly requested colour must appear in the rendered row.
    colored_row = logger._step(optimizer, colour=Fore.RED)
    assert Fore.RED in colored_row
133+
134+
135+
def test_header():
    """_header lists every column name and ends with a divider line."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    header = ScreenLogger()._header(optimizer)
    # All expected column names must be present.
    for column in ("iter", "target", "p1", "p2"):
        assert column in header
    # The divider under the column row is a run of dashes.
    assert "-" * 10 in header

    # A constrained logger gains the extra "allowed" column.
    constrained_header = ScreenLogger(is_constrained=True)._header(optimizer)
    assert "allowed" in constrained_header
155+
156+
157+
def test_is_new_max():
    """_is_new_max reports True only when an observation beats the tracked maximum."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
    logger = ScreenLogger()

    # With no observations there can be no new maximum.
    assert not logger._is_new_max(optimizer)

    # The first call after an observation merely seeds _previous_max ...
    optimizer.register(params={"p1": 1, "p2": 2}, target=3)
    logger._is_new_max(optimizer)
    # ... so an immediate re-check still reports no improvement.
    assert not logger._is_new_max(optimizer)

    # A strictly worse observation is not a new maximum.
    optimizer.register(params={"p1": 0.5, "p2": 1}, target=1.5)
    assert not logger._is_new_max(optimizer)

    # A strictly better observation finally is.
    optimizer.register(params={"p1": 2, "p2": 2}, target=4)
    assert logger._is_new_max(optimizer)
179+
180+
181+
def test_update_tracker():
    """_update_tracker counts iterations and tracks the best target seen so far."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
    logger = ScreenLogger()

    # A fresh logger has tracked nothing yet.
    assert logger._iterations == 0
    assert logger._previous_max is None

    # Each tuple: params to register, its target, then the expected tracked
    # maximum and maximizing params after the update.
    scenarios = [
        ({"p1": 1, "p2": 2}, 3, 3, {"p1": 1, "p2": 2}),
        ({"p1": 0.5, "p2": 1}, 1.5, 3, {"p1": 1, "p2": 2}),  # worse: max unchanged
        ({"p1": 2, "p2": 2}, 4, 4, {"p1": 2, "p2": 2}),  # better: max updated
    ]
    for count, (params, target, best, best_params) in enumerate(scenarios, start=1):
        optimizer.register(params=params, target=target)
        logger._update_tracker(optimizer)
        assert logger._iterations == count
        assert logger._previous_max == best
        assert logger._previous_max_params == best_params
210+
211+
212+
def test_time_metrics():
    """_time_metrics returns (timestamp string, total elapsed, delta since last call).

    Fix: the original imported ``time`` in the middle of the test body; the
    import now lives at the top of the file with the other stdlib imports.
    """
    logger = ScreenLogger()

    # The first call initializes the internal start/previous timestamps.
    time_str, total_elapsed, delta = logger._time_metrics()
    assert isinstance(time_str, str)
    assert isinstance(total_elapsed, float)
    assert isinstance(delta, float)
    assert delta <= 0.1  # first delta should be near zero

    # After a short sleep both the total and the per-call delta must advance.
    time.sleep(0.01)
    time_str2, total_elapsed2, delta2 = logger._time_metrics()
    assert total_elapsed2 > total_elapsed
    assert delta2 > 0
230+
231+
232+
@patch("sys.stdout", new_callable=io.StringIO)
def test_log_optimization_start(mock_stdout):
    """log_optimization_start is silent at verbose=0 and prints the header otherwise."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    # verbose=0: nothing may reach stdout.
    logger = ScreenLogger(verbose=0)
    logger.log_optimization_start(optimizer)
    assert mock_stdout.getvalue() == ""

    # verbose=1: the header with every column name is printed.
    logger.verbose = 1
    logger.log_optimization_start(optimizer)
    printed = mock_stdout.getvalue()
    for column in ("iter", "target", "p1", "p2"):
        assert column in printed
250+
251+
252+
@patch("sys.stdout", new_callable=io.StringIO)
def test_log_optimization_step(mock_stdout):
    """log_optimization_step prints rows according to verbosity and new maxima."""

    def reset_buffer():
        # Drop anything accumulated so far so each assertion sees fresh output.
        mock_stdout.truncate(0)
        mock_stdout.seek(0)

    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    # verbose=1 is the only level that suppresses rows for non-maximum points
    # (the implementation prints when: self._verbose != 1 or is_new_max).
    logger = ScreenLogger(verbose=1)
    reset_buffer()

    # Seed the tracked maximum with a first observation.
    optimizer.register(params={"p1": 1, "p2": 2}, target=3)
    logger._is_new_max(optimizer)

    # Register a strictly worse point.
    optimizer.register(params={"p1": 0.5, "p2": 1}, target=1.5)
    reset_buffer()

    # At verbose=1 a non-maximum step produces no output at all.
    logger.log_optimization_step(optimizer)
    assert mock_stdout.getvalue() == ""

    # At verbose=2 every step is printed, including non-maximum ones.
    logger.verbose = 2
    reset_buffer()
    logger.log_optimization_step(optimizer)
    assert "1.5" in mock_stdout.getvalue()  # the step's target value

    # A new maximum is printed even at verbose=1, highlighted in magenta.
    reset_buffer()
    logger.verbose = 1
    optimizer.register(params={"p1": 2, "p2": 2}, target=4)
    logger.log_optimization_step(optimizer)
    printed = mock_stdout.getvalue()
    assert "4" in printed  # target value
    assert Fore.MAGENTA in printed
300+
301+
302+
@patch("sys.stdout", new_callable=io.StringIO)
def test_log_optimization_end(mock_stdout):
    """log_optimization_end is silent at verbose=0 and prints a closing line otherwise."""
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    # Print the header first so the logger knows the line width to close with.
    logger = ScreenLogger(verbose=2)
    logger.log_optimization_start(optimizer)

    def reset_buffer():
        mock_stdout.truncate(0)
        mock_stdout.seek(0)

    # verbose=0: no output at all.
    logger.verbose = 0
    reset_buffer()
    logger.log_optimization_end(optimizer)
    assert mock_stdout.getvalue() == ""

    # verbose=2: the "=" closing line is emitted.
    logger.verbose = 2
    reset_buffer()
    logger.log_optimization_end(optimizer)
    assert "=" in mock_stdout.getvalue()

0 commit comments

Comments
 (0)