 
 from __future__ import annotations
 
+from collections.abc import Mapping
 from datetime import datetime
 from typing import TYPE_CHECKING, Any
 
 from colorama import Fore, just_fix_windows_console
 
 if TYPE_CHECKING:
-    from bayes_opt.bayesian_optimization import BayesianOptimization
+    from bayes_opt.parameter import ParameterConfig
 
 just_fix_windows_console()
 
@@ -32,9 +33,15 @@ class ScreenLogger:
     _colour_regular_message = Fore.RESET
     _colour_reset = Fore.RESET
 
-    def __init__(self, verbose: int = 2, is_constrained: bool = False) -> None:
+    def __init__(
+        self,
+        verbose: int = 2,
+        is_constrained: bool = False,
+        params_config: Mapping[str, ParameterConfig] | None = None,
+    ) -> None:
         self._verbose = verbose
         self._is_constrained = is_constrained
+        self._params_config = params_config
         self._header_length = None
         self._iterations = 0
         self._previous_max = None
@@ -63,6 +70,22 @@ def is_constrained(self) -> bool:
6370 """Return whether the logger is constrained."""
6471 return self ._is_constrained
6572
73+ @property
74+ def params_config (self ) -> Mapping [str , ParameterConfig ] | None :
75+ """Return the parameter configuration used for formatting."""
76+ return self ._params_config
77+
78+ @params_config .setter
79+ def params_config (self , config : Mapping [str , ParameterConfig ]) -> None :
80+ """Set the parameter configuration used for formatting.
81+
82+ Parameters
83+ ----------
84+ config : Mapping[str, ParameterConfig]
85+ New parameter configuration.
86+ """
87+ self ._params_config = config
88+
6689 def _format_number (self , x : float ) -> str :
6790 """Format a number.
6891
@@ -118,50 +141,56 @@ def _format_str(self, str_: str) -> str:
             return s[: self._default_cell_size - 3] + "..."
         return s
 
-    def _step(self, instance: BayesianOptimization, colour: str = _colour_regular_message) -> str:
-        """Log a step.
+    def _print_step(
+        self, result: dict[str, Any], keys: list[str], colour: str = _colour_regular_message
+    ) -> str:
+        """Print a step.
 
         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the event.
+        result : dict[str, Any]
+            The result dictionary for the most recent step.
 
-        colour :
+        keys : list[str]
+            The parameter keys.
+
+        colour : str, optional
+            Color to use for the output.
             (Default value = _colour_regular_message, equivalent to Fore.RESET)
 
         Returns
         -------
         A stringified, formatted version of the most recent optimization step.
         """
-        res: dict[str, Any] = instance.res[-1]
-        keys: list[str] = instance.space.keys
+        if self._params_config is None:
+            err_msg = "Parameter configuration is not set. Call set_params_config before logging."
+            raise ValueError(err_msg)
+
         # iter, target, allowed [, *params]
         cells: list[str | None] = [None] * (3 + len(keys))
 
-        cells[:2] = self._format_number(self._iterations + 1), self._format_number(res["target"])
+        cells[:2] = self._format_number(self._iterations + 1), self._format_number(result["target"])
         if self._is_constrained:
-            cells[2] = self._format_bool(res["allowed"])
-        params = res.get("params", {})
+            cells[2] = self._format_bool(result["allowed"])
+        params = result.get("params", {})
         cells[3:] = [
-            instance.space._params_config[key].to_string(val, self._default_cell_size)
-            for key, val in params.items()
+            self._params_config[key].to_string(val, self._default_cell_size) for key, val in params.items()
         ]
 
         return "| " + " | ".join(colour + x + self._colour_reset for x in cells if x is not None) + " |"
 
-    def _header(self, instance: BayesianOptimization) -> str:
+    def _print_header(self, keys: list[str]) -> str:
         """Print the header of the log.
 
         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the header.
+        keys : list[str]
+            The parameter keys.
 
         Returns
         -------
         A stringified, formatted version of the most header.
         """
-        keys: list[str] = instance.space.keys
         # iter, target, allowed [, *params]
         cells: list[str | None] = [None] * (3 + len(keys))
 
@@ -174,42 +203,40 @@ def _header(self, instance: BayesianOptimization) -> str:
         self._header_length = len(line)
         return line + "\n" + ("-" * self._header_length)
 
-    def _is_new_max(self, instance: BayesianOptimization) -> bool:
+    def _is_new_max(self, current_max: dict[str, Any] | None) -> bool:
         """Check if the step to log produced a new maximum.
 
         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the step.
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.
 
         Returns
         -------
         boolean
         """
-        if instance.max is None:
+        if current_max is None:
             # During constrained optimization, there might not be a maximum
             # value since the optimizer might've not encountered any points
             # that fulfill the constraints.
             return False
         if self._previous_max is None:
-            self._previous_max = instance.max["target"]
-        return instance.max["target"] > self._previous_max
+            self._previous_max = current_max["target"]
+        return current_max["target"] > self._previous_max
 
-    def _update_tracker(self, instance: BayesianOptimization) -> None:
+    def _update_tracker(self, current_max: dict[str, Any] | None) -> None:
         """Update the tracker.
 
         Parameters
         ----------
-        instance : bayesian_optimization.BayesianOptimization
-            The instance associated with the step.
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.
         """
         self._iterations += 1
 
-        if instance.max is None:
+        if current_max is None:
             return
 
-        current_max = instance.max
-
         if self._previous_max is None or current_max["target"] > self._previous_max:
             self._previous_max = current_max["target"]
             self._previous_max_params = current_max["params"]
@@ -228,43 +255,45 @@ def _time_metrics(self) -> tuple[str, float, float]:
         self._previous_time = now
         return (now.strftime("%Y-%m-%d %H:%M:%S"), time_elapsed.total_seconds(), time_delta.total_seconds())
 
-    def log_optimization_start(self, instance: BayesianOptimization) -> None:
+    def log_optimization_start(self, keys: list[str]) -> None:
         """Log the start of the optimization process.
 
         Parameters
         ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
+        keys : list[str]
+            The parameter keys.
         """
         if self._verbose:
-            line = self._header(instance) + "\n"
+            line = self._print_header(keys) + "\n"
             print(line, end="")
 
-    def log_optimization_step(self, instance: BayesianOptimization) -> None:
+    def log_optimization_step(
+        self, keys: list[str], result: dict[str, Any], current_max: dict[str, Any] | None
+    ) -> None:
         """Log an optimization step.
 
         Parameters
         ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
+        keys : list[str]
+            The parameter keys.
+
+        result : dict[str, Any]
+            The result dictionary for the most recent step.
+
+        current_max : dict[str, Any] | None
+            The current maximum target value and its parameters.
         """
-        is_new_max = self._is_new_max(instance)
-        self._update_tracker(instance)
+        is_new_max = self._is_new_max(current_max)
+        self._update_tracker(current_max)
 
         if self._verbose != 1 or is_new_max:
             colour = self._colour_new_max if is_new_max else self._colour_regular_message
-            line = self._step(instance, colour=colour) + "\n"
+            line = self._print_step(result, keys, colour=colour) + "\n"
             if self._verbose:
                 print(line, end="")
 
-    def log_optimization_end(self, instance: BayesianOptimization) -> None:
-        """Log the end of the optimization process.
-
-        Parameters
-        ----------
-        instance : BayesianOptimization
-            The instance associated with the event.
-        """
+    def log_optimization_end(self) -> None:
+        """Log the end of the optimization process."""
         if self._verbose and self._header_length is not None:
             line = "=" * self._header_length + "\n"
             print(line, end="")
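
A minimal usage sketch of the refactored logger, assuming `optimizer` is a BayesianOptimization instance that exposes the attributes the removed code used to read directly (`space._params_config`, `space.keys`, `res`, and `max`); the import path and the actual wiring inside the library are not shown in this diff and may differ:

    from bayes_opt.logger import ScreenLogger  # import path assumed

    # The caller now supplies the parameter configuration up front instead of the
    # logger pulling it from the optimizer instance at print time.
    logger = ScreenLogger(verbose=2, is_constrained=False, params_config=optimizer.space._params_config)

    keys = optimizer.space.keys
    logger.log_optimization_start(keys)

    # After each evaluation, pass the latest result and the current maximum explicitly.
    logger.log_optimization_step(keys, optimizer.res[-1], optimizer.max)

    logger.log_optimization_end()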