
Commit 885fe09

Use the skopt.Optimizer object API instead of the skopt.gp_minimize function
1 parent 2e4c67c commit 885fe09
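
The switch is worth a word of context: gp_minimize drives the entire optimization loop internally, while the Optimizer object exposes an ask/tell interface in which the caller requests candidate points, evaluates them itself, and reports the observations back to the surrogate model. That inversion of control is what makes the batched evaluation loop in the diff below possible. A minimal standalone sketch of the pattern, with a toy one-dimensional objective standing in for kernel_tuner's CostFunc (the objective and bounds here are illustrative, not from this commit):

    from skopt import Optimizer
    from skopt.space.space import Integer

    # Toy objective standing in for kernel_tuner's CostFunc.
    def objective(x):
        return (x[0] - 3) ** 2

    opt = Optimizer(dimensions=[Integer(0, 10)], base_estimator="GP", n_initial_points=5)

    for _ in range(20):
        x = opt.ask()     # propose the next point to evaluate
        y = objective(x)  # evaluate it ourselves
        opt.tell(x, y)    # update the surrogate with the observation

    print(min(opt.yi))    # best objective value observed so far

The diff applies this same pattern, except that proposals are index vectors into the tuning parameters and evaluations go through CostFunc.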

File tree

1 file changed: +69 -47 lines

kernel_tuner/strategies/skopt.py

Lines changed: 69 additions & 47 deletions
@@ -1,76 +1,98 @@
-"""The strategy that uses a minimizer method for searching through the parameter space."""
+"""The strategy that uses the optimizer from skopt for searching through the parameter space."""
 
+import numpy as np
 from kernel_tuner.util import StopCriterionReached
 from kernel_tuner.searchspace import Searchspace
 from kernel_tuner.strategies.common import (
     CostFunc,
     get_options,
-    snap_to_nearest_config,
     get_strategy_docstring,
 )
 
-supported_methods = ["forest", "gbrt", "gp", "dummy"]
+supported_learners = ["RF", "ET", "GBRT", "DUMMY", "GP"]
+supported_acq = ["LCB", "EI", "PI", "gp_hedge"]
+supported_liars = ["cl_min", "cl_mean", "cl_max"]
 
 _options = dict(
-    method=(f"Local optimization algorithm to use, choose any from {supported_methods}", "gp"),
-    options=("Options passed to the skopt method as kwargs.", dict()),
-    popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
-    maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
+    learner=(f"The learner to use (supported: {supported_learners})", "RF"),
+    acq_func=(f"The acquisition function to use (supported: {supported_acq})", "gp_hedge"),
+    lie_strategy=(f"The lie strategy to use when using batches (supported: {supported_liars})", "cl_max"),
+    kappa=("The value of kappa", 1.96),
+    num_initial=("Number of initial samples. If `None`, let skopt choose the initial population", None),
+    batch_size=("The number of points to ask per batch", 1),
+    skopt_kwargs=("Additional options passed to the skopt `Optimizer` as kwargs.", dict()),
 )
 
 
 def tune(searchspace: Searchspace, runner, tuning_options):
-    import skopt
-
-    method, skopt_options, popsize, maxiter = get_options(tuning_options.strategy_options, _options)
+    learner, acq_func, lie_strategy, kappa, num_initial, batch_size, skopt_kwargs = \
+        get_options(tuning_options.strategy_options, _options)
 
     # Get maximum number of evaluations
-    max_fevals = searchspace.size
-    if "max_fevals" in tuning_options:
-        max_fevals = min(tuning_options["max_fevals"], max_fevals)
-
-    # Set the maximum number of calls to 100 times the maximum number of evaluations.
-    # Not all calls by skopt will result in an evaluation since different calls might
-    # map to the same configuration.
-    if "n_calls" not in skopt_options:
-        skopt_options["n_calls"] = 100 * max_fevals
-
-    # If the initial population size is specified, we select `popsize` samples
-    # from the search space. This is more efficient than letting skopt select
-    # the samples as it is not aware of restrictions.
-    if popsize:
-        x0 = searchspace.get_random_sample(min(popsize, max_fevals))
-        skopt_options["x0"] = [searchspace.get_param_indices(x) for x in x0]
-
-    opt_result = None
-    tune_params_values = list(searchspace.tune_params.values())
-    bounds = [(0, len(p) - 1) if len(p) > 1 else [0] for p in tune_params_values]
+    max_fevals = min(tuning_options.get("max_fevals", np.inf), searchspace.size)
 
+    # Cost function
     cost_func = CostFunc(searchspace, tuning_options, runner)
-    objective = lambda x: cost_func(searchspace.get_param_config_from_param_indices(x))
-    space_constraint = lambda x: searchspace.is_param_config_valid(searchspace.get_param_config_from_param_indices(x))
+    opt_config, opt_result = None, None
 
-    skopt_options["space_constraint"] = space_constraint
-    skopt_options["verbose"] = tuning_options.verbose
+    # The dimensions. Parameters with one value become categorical
+    from skopt.space.space import Categorical, Integer
+    tune_params_values = list(searchspace.tune_params.values())
+    bounds = [Integer(0, len(p) - 1) if len(p) > 1 else Categorical([0]) for p in tune_params_values]
+
+    # Space constraint
+    space_constraint = lambda x: searchspace.is_param_config_valid(
+        searchspace.get_param_config_from_param_indices(x))
+
+    # Create skopt optimizer
+    skopt_kwargs = dict(skopt_kwargs)
+    skopt_kwargs.setdefault("acq_func_kwargs", {})["kappa"] = kappa
+
+    from skopt import Optimizer as SkOptimizer
+    optimizer = SkOptimizer(
+        dimensions=bounds,
+        base_estimator=learner,
+        n_initial_points=num_initial,
+        acq_func=acq_func,
+        space_constraint=space_constraint,
+        **skopt_kwargs
+    )
+
+    # Ask initial batch of configs
+    num_initial = optimizer._n_initial_points
+    batch = optimizer.ask(num_initial, lie_strategy)
+    Xs, Ys = [], []
+    eval_count = 0
+
+    if tuning_options.verbose:
+        print(f"Asked optimizer for {num_initial} points: {batch}")
 
     try:
-        for _ in range(maxiter):
-            if method == "dummy":
-                opt_result = skopt.dummy_minimize(objective, bounds, **skopt_options)
-            elif method == "forest":
-                opt_result = skopt.forest_minimize(objective, bounds, **skopt_options)
-            elif method == "gp":
-                opt_result = skopt.gp_minimize(objective, bounds, **skopt_options)
-            elif method == "gbrt":
-                opt_result = skopt.gbrt_minimize(objective, bounds, **skopt_options)
-            else:
-                raise ValueError(f"invalid skopt method: {method}")
+        while eval_count < max_fevals:
+            if not batch:
+                optimizer.tell(Xs, Ys)
+                batch = optimizer.ask(batch_size, lie_strategy)
+                Xs, Ys = [], []
+
+            if tuning_options.verbose:
+                print(f"Asked optimizer for {batch_size} points: {batch}")
+
+            x = batch.pop(0)
+            y = cost_func(searchspace.get_param_config_from_param_indices(x))
+            eval_count += 1
+
+            Xs.append(x)
+            Ys.append(y)
+
+            if opt_result is None or y < opt_result:
+                opt_config, opt_result = x, y
+
     except StopCriterionReached as e:
         if tuning_options.verbose:
             print(e)
 
-    if opt_result and tuning_options.verbose:
-        print(opt_result)
+    if opt_result is not None and tuning_options.verbose:
+        print(f"Best configuration: {opt_result}")
 
     return cost_func.results
 
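
Two pieces of the new loop deserve a note. First, asking for a batch of points before any of them has been told back requires a lie strategy: skopt's constant-liar heuristic temporarily assigns each pending point a fixed objective value, so the acquisition function does not keep proposing the same point within one batch (cl_min and cl_max lie with the best and worst value observed so far, cl_mean with the mean). Second, kappa is forwarded through acq_func_kwargs and sets the exploration/exploitation trade-off of the LCB acquisition function, with larger values favouring exploration. A minimal sketch of batched ask/tell follows; the toy objective is illustrative only, and the sketch omits the space_constraint argument, which is not part of stock scikit-optimize releases and presumably comes from the scikit-optimize build that kernel_tuner depends on:

    from skopt import Optimizer
    from skopt.space.space import Integer

    def objective(x):
        return (x[0] - 3) ** 2 + (x[1] - 7) ** 2

    opt = Optimizer(
        dimensions=[Integer(0, 10), Integer(0, 10)],
        base_estimator="RF",              # the new default learner
        acq_func="LCB",
        acq_func_kwargs={"kappa": 1.96},  # larger kappa favours exploration
        n_initial_points=4,
    )

    for _ in range(5):
        # Constant-liar batching: pending points are temporarily "told"
        # a constant lie so one batch does not collapse onto one point.
        batch = opt.ask(n_points=4, strategy="cl_max")
        ys = [objective(x) for x in batch]
        opt.tell(batch, ys)               # report the whole batch at once

    print(min(zip(opt.yi, opt.Xi)))       # best (value, point) pair seen

The committed loop differs from this sketch in one respect: it tells results back only when the current batch is exhausted, accumulating them in Xs and Ys, which keeps the number of surrogate refits proportional to the number of batches rather than the number of evaluations.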
