Skip to content

Commit 5798286

Browse files
committed
Merge with custom_diff_evo
2 parents 0d9b90b + 962e5f9 commit 5798286

18 files changed

+652
-79
lines changed

doc/source/optimization.rst

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,12 @@ cache files, serving a value from the cache for the first time in the run also c
4646
Only unique function evaluations are counted, so the second time a parameter configuration is selected by the strategy it is served from the
4747
cache, but not counted as a unique function evaluation.
4848

49+
All optimization algorithms, except for brute_force, random_sample, and bayes_opt, allow the user to specify an initial guess or
50+
starting point for the optimization, called ``x0``. This can be passed to the strategy using the ``strategy_options=`` dictionary with ``"x0"`` as key and
51+
a list of values, one for each parameter in tune_params, to specify the starting point. For example, for a kernel that has parameters ``block_size_x`` (64, 128, 256)
52+
and ``tile_size_x`` (1, 2, 3), one could pass ``strategy_options=dict(x0=[128,2])`` to ``tune_kernel()`` to make sure the strategy starts from
53+
the configuration with ``block_size_x=128, tile_size_x=2``. The order in the ``x0`` list should match the order in the tunable parameters dictionary.
54+
4955
Below all the strategies are listed with their strategy-specific options that can be passed in a dictionary to the ``strategy_options=`` argument
5056
of ``tune_kernel()``.
5157

kernel_tuner/strategies/bayes_opt.py

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313

1414
# BO imports
1515
from kernel_tuner.searchspace import Searchspace
16-
from kernel_tuner.strategies.common import CostFunc
16+
from kernel_tuner.strategies.common import CostFunc, get_options
1717
from kernel_tuner.util import StopCriterionReached
1818

1919
try:
@@ -26,6 +26,24 @@
2626

2727
supported_methods = ["poi", "ei", "lcb", "lcb-srinivas", "multi", "multi-advanced", "multi-fast", "multi-ultrafast"]
2828

29+
# _options dict is used for generating documentation and for checking for unsupported strategy_options in bayes_opt (see get_options call in tune)
30+
_options = dict(
31+
covariancekernel=(
32+
'The Covariance kernel to use, choose any from "constantrbf", "rbf", "matern32", "matern52"',
33+
"matern32",
34+
),
35+
covariancelengthscale=("The covariance length scale", 1.5),
36+
method=(
37+
"The Bayesian Optimization method to use, choose any from " + ", ".join(supported_methods),
38+
"multi-ultrafast",
39+
),
40+
samplingmethod=(
41+
"Method used for initial sampling the parameter space, either random or Latin Hypercube Sampling (LHS)",
42+
"lhs",
43+
),
44+
popsize=("Number of initial samples", 20),
45+
)
46+
2947

3048
def generate_normalized_param_dicts(tune_params: dict, eps: float) -> Tuple[dict, dict]:
3149
"""Generates normalization and denormalization dictionaries."""
@@ -92,6 +110,9 @@ def tune(searchspace: Searchspace, runner, tuning_options):
92110
:rtype: list(dict()), dict()
93111
94112
"""
113+
# we don't actually use this for Bayesian Optimization, but it is used to check for unsupported options
114+
get_options(tuning_options.strategy_options, _options, unsupported=["x0"])
115+
95116
max_fevals = tuning_options.strategy_options.get("max_fevals", 100)
96117
prune_parameterspace = tuning_options.strategy_options.get("pruneparameterspace", True)
97118
if not bayes_opt_present:
@@ -143,25 +164,6 @@ def tune(searchspace: Searchspace, runner, tuning_options):
143164
return cost_func.results
144165

145166

146-
# _options dict is used for generating documentation, but is not used to check for unsupported strategy_options in bayes_opt
147-
_options = dict(
148-
covariancekernel=(
149-
'The Covariance kernel to use, choose any from "constantrbf", "rbf", "matern32", "matern52"',
150-
"matern32",
151-
),
152-
covariancelengthscale=("The covariance length scale", 1.5),
153-
method=(
154-
"The Bayesian Optimization method to use, choose any from " + ", ".join(supported_methods),
155-
"multi-ultrafast",
156-
),
157-
samplingmethod=(
158-
"Method used for initial sampling the parameter space, either random or Latin Hypercube Sampling (LHS)",
159-
"lhs",
160-
),
161-
popsize=("Number of initial samples", 20),
162-
)
163-
164-
165167
class BayesianOptimization:
166168
def __init__(
167169
self,

kernel_tuner/strategies/brute_force.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@
66

77
def tune(searchspace: Searchspace, runner, tuning_options):
88

9+
# Force error on unsupported options
10+
common.get_options(tuning_options.strategy_options or [], _options, unsupported=["max_fevals", "time_limit", "x0", "searchspace_construction_options"])
11+
912
# call the runner
1013
return runner.run(searchspace.sorted_list(), tuning_options)
1114

kernel_tuner/strategies/common.py

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,12 @@ def make_strategy_options_doc(strategy_options):
4949
return doc
5050

5151

52-
def get_options(strategy_options, options):
52+
def get_options(strategy_options, options, unsupported=None):
5353
"""Get the strategy-specific options or their defaults from user-supplied strategy_options."""
54-
accepted = list(options.keys()) + ["max_fevals", "time_limit", "searchspace_construction_options"]
54+
accepted = list(options.keys()) + ["max_fevals", "time_limit", "x0", "searchspace_construction_options"]
55+
if unsupported:
56+
for key in unsupported:
57+
accepted.remove(key)
5558
for key in strategy_options:
5659
if key not in accepted:
5760
raise ValueError(f"Unrecognized option {key} in strategy_options (allowed: {accepted})")
@@ -195,6 +198,11 @@ def __call__(self, x, check_restrictions=True):
195198

196199
return return_value
197200

201+
def get_start_pos(self):
202+
"""Get starting position for optimization."""
203+
_, x0, _ = self.get_bounds_x0_eps()
204+
return x0
205+
198206
def get_bounds_x0_eps(self):
199207
"""Compute bounds, x0 (the initial guess), and eps."""
200208
values = list(self.searchspace.tune_params.values())
@@ -211,20 +219,16 @@ def get_bounds_x0_eps(self):
211219
bounds = [(0, eps * len(v)) for v in values]
212220
if x0:
213221
# x0 has been supplied by the user, map x0 into [0, eps*len(v)]
214-
x0 = scale_from_params(x0, self.tuning_options, eps)
222+
x0 = scale_from_params(x0, self.searchspace.tune_params, eps)
215223
else:
216224
# get a valid x0
217225
pos = list(self.searchspace.get_random_sample(1)[0])
218226
x0 = scale_from_params(pos, self.searchspace.tune_params, eps)
219227
else:
220228
bounds = self.get_bounds()
221229
if not x0:
222-
x0 = [(min_v + max_v) / 2.0 for (min_v, max_v) in bounds]
223-
eps = 1e9
224-
for v_list in values:
225-
if len(v_list) > 1:
226-
vals = np.sort(v_list)
227-
eps = min(eps, np.amin(np.gradient(vals)))
230+
x0 = list(self.searchspace.get_random_sample(1)[0])
231+
eps = 1
228232

229233
self.tuning_options["eps"] = eps
230234
logging.debug("get_bounds_x0_eps called")

0 commit comments

Comments
 (0)