@@ -375,19 +375,35 @@ def _smart_minimize(
         ntrials = max(1, len(x_seeds) // 100)
         for _ in range(ntrials):
             xinit = space.random_sample(15 * len(space.bounds), random_state=self.random_state)
-            de = DifferentialEvolutionSolver(acq, bounds=space.bounds, init=xinit, rng=self.random_state)
-            res: OptimizeResult = de.solve()
-
-            # See if success
-            if not res.success:
+            de = DifferentialEvolutionSolver(
+                acq, bounds=space.bounds, init=xinit, rng=self.random_state, polish=False
+            )
+            res_de: OptimizeResult = de.solve()
+            # Check if success
+            if not res_de.success:
                 continue
 
-            # Store it if better than previous minimum(maximum).
-            if min_acq is None or np.squeeze(res.fun) >= min_acq:
-                x_try_sc = de._unscale_parameters(res.x)
-                x_try = space.kernel_transform(x_try_sc).flatten()
-                x_min = x_try
-                min_acq = np.squeeze(res.fun)
+            x_min = res_de.x
+            min_acq = np.squeeze(res_de.fun)
+
+            # Refine the continuous parameters with a deterministic local search
+            if any(continuous_dimensions):
+                x_try = x_min.copy()
+
+                def continuous_acq(x: NDArray[Float], x_try=x_try) -> NDArray[Float]:
+                    x_try[continuous_dimensions] = x
+                    return acq(x_try)
+
+                res: OptimizeResult = minimize(
+                    continuous_acq,
+                    x_min[continuous_dimensions],
+                    bounds=continuous_bounds,
+                    method="L-BFGS-B",
+                )
+                if np.squeeze(res.fun) >= min_acq and res.success:
+                    x_try[continuous_dimensions] = res.x
+                    x_min = x_try
+                    min_acq = np.squeeze(res.fun)
 
         if min_acq is None:
             min_acq = np.inf
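
For context, here is a minimal standalone sketch of the two-stage search this hunk introduces: a global differential-evolution pass with polishing disabled, followed by an L-BFGS-B refinement restricted to the continuous dimensions. It is not the library's code; it uses SciPy's public `differential_evolution` instead of the internal `DifferentialEvolutionSolver`, and the objective `toy_acq`, the `continuous_dimensions` mask, and the acceptance test (keep the refined point only if it is not worse under minimization) are illustrative assumptions.

```python
import numpy as np
from scipy.optimize import differential_evolution, minimize

bounds = np.array([[-5.0, 5.0], [0.0, 10.0], [-2.0, 2.0]])
continuous_dimensions = np.array([True, False, True])  # middle dimension treated as discrete
continuous_bounds = bounds[continuous_dimensions]

def toy_acq(x):
    # Toy acquisition surrogate: quadratic bowl, with the discrete dimension rounded.
    x = np.asarray(x, dtype=float).copy()
    x[1] = np.round(x[1])
    return float(np.sum((x - 1.0) ** 2))

# Stage 1: global differential evolution over all dimensions, without the
# built-in polishing step (mirrors polish=False in the diff).
res_de = differential_evolution(toy_acq, bounds, seed=0, polish=False)
x_min, min_acq = res_de.x, res_de.fun

# Stage 2: deterministic L-BFGS-B refinement over the continuous dimensions only,
# holding the discrete coordinate fixed at the value DE found.
x_try = x_min.copy()

def continuous_acq(x):
    x_try[continuous_dimensions] = x
    return toy_acq(x_try)

res = minimize(
    continuous_acq,
    x_min[continuous_dimensions],
    bounds=continuous_bounds,
    method="L-BFGS-B",
)
if res.success and res.fun <= min_acq:  # keep the refined point only if it is not worse
    x_try[continuous_dimensions] = res.x
    x_min, min_acq = x_try, res.fun

print(x_min, min_acq)
```

The likely motivation for `polish=False` is that SciPy's built-in polish step runs L-BFGS-B over every dimension, including non-continuous ones; refining only the continuous subspace afterwards keeps the discrete coordinates at the values the global search selected.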