
Commit 1f935a1

Merge branch 'hyperparametertuning' of https://github.com/KernelTuner/kernel_tuner into hyperparametertuning

2 parents: dcd102b + b820419

36 files changed: +866 / -2974 lines

.gitignore

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 poetry.lock
 noxenv.txt
 noxsettings.toml
-hyperparamtuning/*
+hyperparamtuning*/*
 *.prof

 ### Python ###

doc/requirements.txt

Lines changed: 86 additions & 85 deletions
Large diffs are not rendered by default.

doc/requirements_test.txt

Lines changed: 557 additions & 345 deletions
Large diffs are not rendered by default.

kernel_tuner/backends/hypertuner.py

Lines changed: 9 additions & 11 deletions
@@ -61,26 +61,23 @@ def compile(self, kernel_instance):
         path.mkdir(exist_ok=True)

         # TODO get applications & GPUs args from benchmark
-        # gpus = ["RTX_3090", "RTX_2080_Ti"]
-        # applications = None
-
-        gpus = ["A100", "W6600"]
-        folder = "../../autotuning_methodology/benchmark_hub/kernels"
+        gpus = ["A100", "A4000", "MI250X"]
+        folder = "../autotuning_methodology/benchmark_hub/kernels"
         applications = [
             {
                 "name": "dedispersion_milo",
                 "folder": folder,
                 "input_file": "dedispersion_milo.json"
             },
             {
-                "name": "convolution_milo",
+                "name": "hotspot_milo",
                 "folder": folder,
-                "input_file": "convolution_milo.json"
+                "input_file": "hotspot_milo.json"
             },
             {
-                "name": "hotspot_milo",
+                "name": "convolution_milo",
                 "folder": folder,
-                "input_file": "hotspot_milo.json"
+                "input_file": "convolution_milo.json"
             },
             {
                 "name": "gemm_milo",
@@ -104,8 +101,9 @@ def compile(self, kernel_instance):
         # any additional settings
         override = {
             "experimental_groups_defaults": {
-                "repeats": 10,
-                "samples": self.iterations
+                "repeats": 25,
+                "samples": self.iterations,
+                "minimum_fraction_of_budget_valid": 0.01,
             }
         }
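Every entry in the applications list repeats the same folder/input-file shape, with the input file following the "<name>.json" convention. A minimal sketch of building that list from kernel names alone (a hypothetical refactor for illustration, not part of this commit):

# Hypothetical helper: derive the applications list from kernel names,
# assuming each benchmark input follows the "<name>.json" convention.
folder = "../autotuning_methodology/benchmark_hub/kernels"
kernel_names = ["dedispersion_milo", "hotspot_milo", "convolution_milo", "gemm_milo"]

applications = [
    {"name": name, "folder": folder, "input_file": f"{name}.json"}
    for name in kernel_names
]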

kernel_tuner/hyper.py

Lines changed: 20 additions & 5 deletions
@@ -103,16 +103,16 @@ def put_if_not_present(target_dict, key, value):
         hyperparams = {
             'popsize': [10, 20, 30],
             'maxiter': [50, 100, 150],
-            'w': [0.25, 0.5, 0.75],
+            # 'w': [0.25, 0.5, 0.75],  # disabled due to low influence according to KW-test (H=0.0215) and mutual information
             'c1': [1.0, 2.0, 3.0],
             'c2': [0.5, 1.0, 1.5]
         }
     elif strategy_to_tune.lower() == "greedy_ils":
         hyperparams = {
             'neighbor': ['Hamming', 'adjacent'],
             'restart': [True, False],
-            'no_improvement': [1, 10, 25, 33, 50, 66, 75, 100, 200],
-            'random_walk': [0.01, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.99]
+            'no_improvement': [10, 25, 50, 75],
+            'random_walk': [0.1, 0.2, 0.3, 0.4, 0.5]
         }
     elif strategy_to_tune.lower() == "dual_annealing":
         hyperparams = {
@@ -127,7 +127,7 @@ def put_if_not_present(target_dict, key, value):
     elif strategy_to_tune.lower() == "basinhopping":
         hyperparams = {
             'method': ["Nelder-Mead", "Powell", "CG", "BFGS", "L-BFGS-B", "TNC", "COBYLA", "SLSQP"],
-            'T': [0.5, 1.0, 1.5],
+            'T': [0.1, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5],
         }
     elif strategy_to_tune.lower() == "genetic_algorithm":
         hyperparams = {
@@ -136,12 +136,27 @@ def put_if_not_present(target_dict, key, value):
             'maxiter': [50, 100, 150],
             'mutation_chance': [5, 10, 20]
         }
-    elif strategy_to_tune.lower() == "mls":
+    elif strategy_to_tune.lower() == "greedy_mls":
         hyperparams = {
             'neighbor': ["Hamming", "adjacent"],
             'restart': [True, False],
             'randomize': [True, False]
         }
+    elif strategy_to_tune.lower() == "simulated_annealing":
+        hyperparams = {
+            'T': [0.5, 1.0, 1.5],
+            'T_min': [0.0001, 0.001, 0.01],
+            'alpha': [0.9925, 0.995, 0.9975],
+            'maxiter': [1, 2, 3]
+        }
+    elif strategy_to_tune.lower() == "bayes_opt":
+        hyperparams = {
+            # 'covariancekernel': ["constantrbf", "rbf", "matern32", "matern52"],
+            'covariancelengthscale': [1.0, 1.5, 2.0],
+            'method': ["poi", "ei", "lcb", "lcb-srinivas", "multi", "multi-advanced", "multi-fast", "multi-ultrafast"],
+            'samplingmethod': ["random", "LHS"],
+            'popsize': [10, 20, 30]
+        }
     else:
         raise ValueError(f"Invalid argument {strategy_to_tune=}")
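Each dictionary defines a small grid of candidate values per hyperparameter, so the number of configurations the hypertuner must evaluate is the product of the list lengths; trimming greedy_ils from 2 x 2 x 9 x 11 = 396 to 2 x 2 x 4 x 5 = 80 combinations is the point of this change. A standalone sketch of enumerating such a grid (illustrative only, not the commit's own enumeration code):

from itertools import product

# Example grid: the reduced greedy_ils hyperparameter space from this commit.
hyperparams = {
    'neighbor': ['Hamming', 'adjacent'],
    'restart': [True, False],
    'no_improvement': [10, 25, 50, 75],
    'random_walk': [0.1, 0.2, 0.3, 0.4, 0.5],
}

# Enumerate every combination as a dict, as an exhaustive grid search would.
configs = [dict(zip(hyperparams, values)) for values in product(*hyperparams.values())]
print(len(configs))  # 2 * 2 * 4 * 5 = 80 configurations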

kernel_tuner/integration.py

Lines changed: 4 additions & 4 deletions
@@ -4,7 +4,7 @@

 from jsonschema import validate

-from kernel_tuner import util
+from kernel_tuner.util import get_instance_string, looks_like_a_filename, read_file

 #specifies for a number of pre-defined objectives whether
 #the objective should be minimized or maximized (boolean value denotes higher is better)
@@ -205,8 +205,8 @@ def top_result(item):
     meta["version_number"] = "1.0"
     meta["kernel_name"] = kernel_name
     if kernel_string and not callable(kernel_string) and not isinstance(kernel_string, list):
-        if util.looks_like_a_filename(kernel_string):
-            meta["kernel_string"] = util.read_file(kernel_string)
+        if looks_like_a_filename(kernel_string):
+            meta["kernel_string"] = read_file(kernel_string)
         else:
             meta["kernel_string"] = kernel_string
     meta["objective"] = objective
@@ -337,7 +337,7 @@ def _select_best_common_config(results, objective, objective_higher_is_better):
     for config in results:
         params = config["tunable_parameters"]

-        config_str = util.get_instance_string(params)
+        config_str = get_instance_string(params)
         #count occurances
         results_table[config_str] = results_table.get(config_str,0) + 1
         #add to performance

kernel_tuner/interface.py

Lines changed: 13 additions & 11 deletions
@@ -50,11 +50,6 @@
 from kernel_tuner.strategies import (
     basinhopping,
     bayes_opt,
-    bayes_opt_alt_BOTorch,
-    bayes_opt_BOTorch,
-    bayes_opt_GPyTorch,
-    bayes_opt_GPyTorch_lean,
-    bayes_opt_old,
     brute_force,
     diff_evo,
     dual_annealing,
@@ -85,12 +80,7 @@
     "pso": pso,
     "simulated_annealing": simulated_annealing,
     "firefly_algorithm": firefly_algorithm,
-    "bayes_opt": bayes_opt,
-    "bayes_opt_old": bayes_opt_old,
-    "bayes_opt_GPyTorch": bayes_opt_GPyTorch,
-    "bayes_opt_GPyTorch_lean": bayes_opt_GPyTorch_lean,
-    "bayes_opt_BOTorch": bayes_opt_BOTorch,
-    "bayes_opt_BOTorch_alt": bayes_opt_alt_BOTorch,
+    "bayes_opt": bayes_opt
 }

@@ -886,6 +876,16 @@ def tune_kernel_T1(
     problem_size = kernelspec["ProblemSize"]
     device = kernelspec["Device"]["Name"]
     strategy = inputs["Search"]["Name"]
+    if "Attributes" in inputs["Search"]:
+        strategy_options = {}
+        for attribute in inputs["Search"]["Attributes"]:
+            strategy_options[attribute["Name"]] = attribute["Value"]
+    if "Budget" in inputs:
+        budget = inputs["Budget"][0]
+        assert budget["Type"] == "ConfigurationCount"
+        if strategy_options is None:
+            strategy_options = {}
+        strategy_options["max_fevals"] = budget["BudgetValue"]

     # set the cache path
     if cache_filepath is None and "SimulationInput" in kernelspec:
@@ -908,6 +908,8 @@ def tune_kernel_T1(
             tune_param = eval(vals)
         else:
             tune_param = literal_eval(vals)
+        if param["Type"] == "string":
+            tune_param = eval(param["Values"])
         if tune_param is not None:
             tune_params[param["Name"]] = tune_param
         else:
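The added block in tune_kernel_T1 reads the search strategy's attributes and an optional configuration-count budget from the T1 input: Search Attributes become strategy_options entries, and a ConfigurationCount budget is mapped onto max_fevals. A sketch of the input shape this parsing assumes, written as a Python dict with illustrative values (not taken from the commit):

# Illustrative T1-style input accepted by the new parsing logic.
inputs = {
    "Search": {
        "Name": "genetic_algorithm",
        "Attributes": [
            {"Name": "popsize", "Value": 20},
            {"Name": "maxiter", "Value": 100},
        ],
    },
    "Budget": [{"Type": "ConfigurationCount", "BudgetValue": 500}],
}
# After parsing: strategy_options == {"popsize": 20, "maxiter": 100, "max_fevals": 500}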

kernel_tuner/searchspace.py

Lines changed: 8 additions & 3 deletions
@@ -4,6 +4,7 @@
 from pathlib import Path
 from random import choice, shuffle
 from typing import List, Union
+from warnings import warn

 import numpy as np
 from constraint import (
@@ -69,9 +70,7 @@ def __init__(
             ), "When `from_cache` is used, the positional arguments must be set to None."
             tune_params = from_cache["tune_params"]
         if from_cache is None:
-            assert (
-                tune_params is not None and max_threads is not None
-            ), "Must specify positional arguments."
+            assert tune_params is not None and max_threads is not None, "Must specify positional arguments."

         # set the object attributes using the arguments
         framework_l = framework.lower()
@@ -237,6 +236,7 @@ def __build_searchspace_bruteforce(self, block_size_names: list, max_threads: int
             isinstance(self._modified_restrictions, list)
             and block_size_restriction_spaced not in self._modified_restrictions
         ):
+            print(f"added default block size restriction '{block_size_restriction_spaced}'")
             self._modified_restrictions.append(block_size_restriction_spaced)
             if isinstance(self.restrictions, list):
                 self.restrictions.append(block_size_restriction_spaced)
@@ -864,6 +864,11 @@ def get_random_sample_indices(self, num_samples: int) -> np.ndarray:

     def get_random_sample(self, num_samples: int) -> List[tuple]:
         """Get the parameter configurations for a random, non-conflicting sample (caution: not unique in consecutive calls)."""
+        if self.size < num_samples:
+            warn(
+                f"Too many samples requested ({num_samples}), reducing the number of samples to the searchspace size ({self.size})"
+            )
+            num_samples = self.size
         return self.get_param_configs_at_indices(self.get_random_sample_indices(num_samples))

     def get_neighbors_indices_no_cache(self, param_config: tuple, neighbor_method=None) -> List[int]:
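The new guard in get_random_sample warns and clamps oversized sample requests instead of failing inside the index sampling. A standalone sketch of the same warn-and-clamp pattern, with hypothetical names rather than the Searchspace class itself:

from warnings import warn

def clamped_sample_size(requested: int, searchspace_size: int) -> int:
    """Mirror the guard added to get_random_sample: warn, then clamp to the space size."""
    if searchspace_size < requested:
        warn(f"Too many samples requested ({requested}), reducing to the searchspace size ({searchspace_size})")
        return searchspace_size
    return requested

print(clamped_sample_size(100, 42))  # emits a warning, prints 42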

kernel_tuner/strategies/basinhopping.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 """The strategy that uses the basinhopping global optimization method."""
 import scipy.optimize

-from kernel_tuner import util
+from kernel_tuner.util import StopCriterionReached
 from kernel_tuner.searchspace import Searchspace
 from kernel_tuner.strategies import common
 from kernel_tuner.strategies.common import CostFunc, setup_method_arguments, setup_method_options
@@ -31,7 +31,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     try:
         opt_result = scipy.optimize.basinhopping(cost_func, x0, T=T, stepsize=eps,
                                                  minimizer_kwargs=minimizer_kwargs, disp=tuning_options.verbose)
-    except util.StopCriterionReached as e:
+    except StopCriterionReached as e:
         if tuning_options.verbose:
             print(e)

kernel_tuner/strategies/bayes_opt.py

Lines changed: 4 additions & 4 deletions
@@ -149,7 +149,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
         if max_fevals - bo.fevals <= 0:
             raise ValueError("No function evaluations left for optimization after sampling")
         bo.optimize(max_fevals)
-    except util.StopCriterionReached as e:
+    except StopCriterionReached as e:
         if tuning_options.verbose:
             print(e)

@@ -238,7 +238,7 @@ def get_hyperparam(name: str, default, supported_values=list()):
         self.invalid_value = 1e20
         self.opt_direction = opt_direction
         if opt_direction == "min":
-            self.worst_value = np.PINF
+            self.worst_value = np.inf
             self.argopt = np.argmin
         elif opt_direction == "max":
             self.worst_value = np.NINF
@@ -265,7 +265,7 @@ def get_hyperparam(name: str, default, supported_values=list()):
         self.__visited_num = 0
         self.__visited_valid_num = 0
         self.__visited_searchspace_indices = [False] * self.searchspace_size
-        self.__observations = [np.NaN] * self.searchspace_size
+        self.__observations = [np.nan] * self.searchspace_size
         self.__valid_observation_indices = [False] * self.searchspace_size
         self.__valid_params = list()
         self.__valid_observations = list()
@@ -314,7 +314,7 @@ def is_not_visited(self, index: int) -> bool:

     def is_valid(self, observation: float) -> bool:
         """Returns whether an observation is valid."""
-        return not (observation is None or observation == self.invalid_value or observation == np.nan)
+        return not (observation is None or observation == self.invalid_value or observation == np.nan)

     def get_af_by_name(self, name: str):
         """Get the basic acquisition functions by their name."""
