Skip to content

Commit a0a6c8d

Browse files
committed
Add initial support for scikit-optimize minimize methods (skopt)
1 parent f9b223b commit a0a6c8d

File tree

3 files changed

+80
-1
lines changed

3 files changed

+80
-1
lines changed

kernel_tuner/interface.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,7 @@
6565
pyatf_strategies,
6666
random_sample,
6767
simulated_annealing,
68+
skopt
6869
)
6970
from kernel_tuner.strategies.wrapper import OptAlgWrapper
7071

@@ -82,6 +83,7 @@
8283
"mls": mls,
8384
"pso": pso,
8485
"simulated_annealing": simulated_annealing,
86+
"skopt": skopt,
8587
"firefly_algorithm": firefly_algorithm,
8688
"bayes_opt": bayes_opt,
8789
"pyatf_strategies": pyatf_strategies,
@@ -394,6 +396,7 @@ def __deepcopy__(self, _):
394396
* "pso" particle swarm optimization
395397
* "random_sample" takes a random sample of the search space
396398
* "simulated_annealing" simulated annealing strategy
399+
* "skopt" uses the minimization methods from `skopt`
397400
398401
Strategy-specific parameters and options are explained under strategy_options.
399402
@@ -594,6 +597,7 @@ def tune_kernel(
594597

595598
kernelsource = core.KernelSource(kernel_name, kernel_source, lang, defines)
596599

600+
print("block_size_names", block_size_names)
597601
_check_user_input(kernel_name, kernelsource, arguments, block_size_names)
598602

599603
# default objective if none is specified
@@ -676,6 +680,7 @@ def preprocess_cache(filepath):
676680

677681
# create search space
678682
tuning_options.restrictions_unmodified = deepcopy(restrictions)
683+
print(searchspace_construction_options)
679684
searchspace = Searchspace(tune_params, restrictions, runner.dev.max_threads, **searchspace_construction_options)
680685
restrictions = searchspace._modified_restrictions
681686
tuning_options.restrictions = restrictions

kernel_tuner/strategies/common.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@ def __init__(
7373
snap=True,
7474
return_invalid=False,
7575
return_raw=None,
76+
invalid_value=sys.float_info.max,
7677
):
7778
"""An abstract method to handle evaluation of configurations.
7879
@@ -100,6 +101,7 @@ def __init__(
100101
self.return_raw = f"{tuning_options['objective']}s"
101102
self.results = []
102103
self.budget_spent_fraction = 0.0
104+
self.invalid_return_value = invalid_value
103105

104106

105107
def __call__(self, x, check_restrictions=True):
@@ -168,7 +170,7 @@ def __call__(self, x, check_restrictions=True):
168170
else:
169171
# this is not a valid configuration, replace with float max if needed
170172
if not self.return_invalid:
171-
return_value = sys.float_info.max
173+
return_value = self.invalid_return_value
172174

173175
# include raw data in return if requested
174176
if self.return_raw is not None:

kernel_tuner/strategies/skopt.py

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
"""The strategy that uses a minimizer method for searching through the parameter space."""
2+
3+
from kernel_tuner.util import StopCriterionReached
4+
from kernel_tuner.searchspace import Searchspace
5+
from kernel_tuner.strategies.common import (
6+
CostFunc,
7+
get_options,
8+
scale_from_params,
9+
get_strategy_docstring,
10+
)
11+
12+
supported_methods = ["forest", "gbrt", "gp", "dummy"]
13+
14+
_options = dict(
15+
method=(f"Local optimization algorithm to use, choose any from {supported_methods}", "gp"),
16+
options=("Options passed to the skopt method as kwargs.", dict()),
17+
popsize=("Number of initial samples. If `None`, let skopt choose the initial population", None),
18+
maxiter=("Maximum number of times to repeat the method until the budget is exhausted.", 1),
19+
)
20+
21+
def tune(searchspace: Searchspace, runner, tuning_options):
    # skopt is an optional dependency; import lazily so the rest of the
    # package works without it installed.
    import skopt

    # Invalid/restricted configurations are reported to skopt as 1e9 rather
    # than float max, which would destabilize the surrogate model fits.
    cost_func = CostFunc(searchspace, tuning_options, runner, scaling=True, invalid_value=1e9)
    bounds, _, eps = cost_func.get_bounds_x0_eps()

    method, skopt_options, popsize, maxiter = get_options(tuning_options.strategy_options, _options)

    # NOTE(review): get_options presumably hands back the default dict() stored
    # in _options when the user supplied no options. Copy it before mutating,
    # otherwise the "n_calls"/"x0" entries written below leak into every
    # subsequent tune() call (shared mutable default).
    skopt_options = dict(skopt_options)

    # Validate the requested method up front, before any budget is spent.
    minimizers = {
        "dummy": skopt.dummy_minimize,
        "forest": skopt.forest_minimize,
        "gp": skopt.gp_minimize,
        "gbrt": skopt.gbrt_minimize,
    }
    if method not in minimizers:
        raise ValueError(f"invalid skopt method: {method}")
    minimize = minimizers[method]

    # Get maximum number of evaluations
    max_fevals = searchspace.size
    if "max_fevals" in tuning_options:
        max_fevals = min(tuning_options["max_fevals"], max_fevals)

    # Set the maximum number of calls to 100 times the maximum number of evaluations.
    # Not all calls by skopt will result in an evaluation, due to restrictions or
    # since different calls might map to the same configuration.
    if "n_calls" not in skopt_options:
        skopt_options["n_calls"] = 100 * max_fevals

    # If the initial population size is specified, we select `popsize` samples
    # from the search space. This is more efficient than letting skopt select
    # the samples as it is not aware of restrictions.
    if popsize:
        x0 = searchspace.get_random_sample(min(popsize, max_fevals))
        skopt_options["x0"] = [list(scale_from_params(x, searchspace.tune_params, eps)) for x in x0]

    opt_result = None
    try:
        # Repeat the minimizer until maxiter rounds are done or the budget
        # runs out (signalled by StopCriterionReached from the cost function).
        for _ in range(maxiter):
            opt_result = minimize(cost_func, bounds, **skopt_options)
    except StopCriterionReached as e:
        if tuning_options.verbose:
            print(e)

    if opt_result and tuning_options.verbose:
        print(opt_result.message)

    # All evaluated configurations are collected by the cost function.
    return cost_func.results


tune.__doc__ = get_strategy_docstring("skopt minimize", _options)

0 commit comments

Comments
 (0)