|
27 | 27 |
|
28 | 28 | import numpy as np |
29 | 29 | from numpy.random import RandomState |
30 | | -from scipy.optimize import minimize |
| 30 | +from packaging import version |
| 31 | +from scipy import __version__ as scipy_version |
| 32 | +from scipy.optimize._differentialevolution import DifferentialEvolutionSolver, minimize |
31 | 33 | from scipy.special import softmax |
32 | 34 | from scipy.stats import norm |
33 | 35 | from sklearn.gaussian_process import GaussianProcessRegressor |
@@ -269,7 +271,7 @@ def _acq_min( |
269 | 271 | acq, space, n_random=max(n_random, n_l_bfgs_b), n_x_seeds=n_l_bfgs_b |
270 | 272 | ) |
271 | 273 | if n_l_bfgs_b: |
272 | | - x_min_l, min_acq_l = self._l_bfgs_b_minimize(acq, space, x_seeds=x_seeds) |
| 274 | + x_min_l, min_acq_l = self._smart_minimize(acq, space, x_seeds=x_seeds) |
273 | 275 | # Either n_random or n_l_bfgs_b is not 0 => at least one of x_min_r and x_min_l is not None |
274 | 276 | if min_acq_r > min_acq_l: |
275 | 277 | return x_min_l |
@@ -318,7 +320,7 @@ def _random_sample_minimize( |
318 | 320 | x_seeds = [] |
319 | 321 | return x_min, min_acq, x_seeds |
320 | 322 |
|
321 | | - def _l_bfgs_b_minimize( |
| 323 | + def _smart_minimize( |
322 | 324 | self, |
323 | 325 | acq: Callable[[NDArray[Float]], NDArray[Float]], |
324 | 326 | space: TargetSpace, |
@@ -348,38 +350,66 @@ def _l_bfgs_b_minimize( |
348 | 350 | continuous_dimensions = space.continuous_dimensions |
349 | 351 | continuous_bounds = space.bounds[continuous_dimensions] |
350 | 352 |
|
351 | | - if not continuous_dimensions.any(): |
352 | | - min_acq = np.inf |
353 | | - x_min = np.array([np.nan] * space.bounds.shape[0]) |
354 | | - return x_min, min_acq |
355 | | - |
356 | 353 | min_acq: float | None = None |
357 | 354 | x_try: NDArray[Float] |
358 | 355 | x_min: NDArray[Float] |
359 | | - for x_try in x_seeds: |
360 | 356 |
|
361 | | - def continuous_acq(x: NDArray[Float], x_try=x_try) -> NDArray[Float]: |
362 | | - x_try[continuous_dimensions] = x |
363 | | - return acq(x_try) |
| 357 | + # Case of continuous optimization |
| 358 | + if all(continuous_dimensions): |
| 359 | + for x_try in x_seeds: |
| 360 | + res: OptimizeResult = minimize(acq, x_try, bounds=continuous_bounds, method="L-BFGS-B") |
| 361 | + if not res.success: |
| 362 | + continue |
364 | 363 |
|
365 | | - # Find the minimum of minus the acquisition function |
366 | | - res: OptimizeResult = minimize( |
367 | | - continuous_acq, x_try[continuous_dimensions], bounds=continuous_bounds, method="L-BFGS-B" |
368 | | - ) |
369 | | - # See if success |
370 | | - if not res.success: |
371 | | - continue |
| 364 | + # Store it if better than previous minimum(maximum). |
| 365 | + if min_acq is None or np.squeeze(res.fun) >= min_acq: |
| 366 | + x_try = res.x |
| 367 | + x_min = x_try |
| 368 | + min_acq = np.squeeze(res.fun) |
372 | 369 |
|
373 | | - # Store it if better than previous minimum(maximum). |
374 | | - if min_acq is None or np.squeeze(res.fun) >= min_acq: |
375 | | - x_try[continuous_dimensions] = res.x |
376 | | - x_min = x_try |
377 | | - min_acq = np.squeeze(res.fun) |
| 370 | + # Case of mixed-integer optimization |
| 371 | + else: |
| 372 | + ntrials = max(1, len(x_seeds) // 100) |
| 373 | + |
| 374 | + for _ in range(ntrials): |
| 375 | + xinit = space.random_sample(15 * len(space.bounds), random_state=self.random_state) |
| 376 | + |
| 377 | + de_parameters = {"func": acq, "bounds": space.bounds, "polish": False, "init": xinit} |
| 378 | + if version.parse(scipy_version) < version.parse("1.15.0"): |
| 379 | + de_parameters["seed"] = self.random_state |
| 380 | + else: |
| 381 | + de_parameters["rng"] = self.random_state |
| 382 | + |
| 383 | + de = DifferentialEvolutionSolver(**de_parameters) |
| 384 | + res_de: OptimizeResult = de.solve() |
| 385 | + # Check if success |
| 386 | + if not res_de.success: |
| 387 | + continue |
| 388 | + |
| 389 | + x_min = res_de.x |
| 390 | + min_acq = np.squeeze(res_de.fun) |
| 391 | + |
| 392 | + # Refine the identification of continuous parameters with a deterministic search |
| 393 | + if any(continuous_dimensions): |
| 394 | + x_try = x_min.copy() |
| 395 | + |
| 396 | + def continuous_acq(x: NDArray[Float], x_try=x_try) -> NDArray[Float]: |
| 397 | + x_try[continuous_dimensions] = x |
| 398 | + return acq(x_try) |
| 399 | + |
| 400 | + res: OptimizeResult = minimize( |
| 401 | + continuous_acq, x_min[continuous_dimensions], bounds=continuous_bounds |
| 402 | + ) |
| 403 | + if np.squeeze(res.fun) >= min_acq and res.success: |
| 404 | + x_try[continuous_dimensions] = res.x |
| 405 | + x_min = x_try |
| 406 | + min_acq = np.squeeze(res.fun) |
378 | 407 |
|
379 | 408 | if min_acq is None: |
380 | 409 | min_acq = np.inf |
381 | 410 | x_min = np.array([np.nan] * space.bounds.shape[0]) |
382 | 411 |
|
| 412 | + x_min = space.kernel_transform(x_min).reshape(x_min.shape) |
383 | 413 | # Clip output to make sure it lies within the bounds. Due to floating |
384 | 414 | # point technicalities this is not always the case. |
385 | 415 | return np.clip(x_min, space.bounds[:, 0], space.bounds[:, 1]), min_acq |
|
0 commit comments