Commit 939ea19

constraint-aware variants for pso, firefly, and sa
1 parent 67a5070 commit 939ea19

File tree (4 files changed, +104 -26 lines):

kernel_tuner/strategies/common.py
kernel_tuner/strategies/firefly_algorithm.py
kernel_tuner/strategies/pso.py
kernel_tuner/strategies/simulated_annealing.py


kernel_tuner/strategies/common.py

Lines changed: 36 additions & 1 deletion
@@ -3,6 +3,7 @@
 from time import perf_counter
 
 import numpy as np
+from scipy.spatial import distance
 
 from kernel_tuner import util
 from kernel_tuner.searchspace import Searchspace
@@ -88,8 +89,17 @@ def __call__(self, x, check_restrictions=True):
 
         # else check if this is a legal (non-restricted) configuration
         if check_restrictions and self.searchspace.restrictions:
+            legal = self.searchspace.is_param_config_valid(tuple(params))
             params_dict = dict(zip(self.searchspace.tune_params.keys(), params))
-            legal = util.check_restrictions(self.searchspace.restrictions, params_dict, self.tuning_options.verbose)
+
+            if "constraint_aware" in self.tuning_options.strategy_options and self.tuning_options.strategy_options["constraint_aware"]:
+                # attempt to repair
+                new_params = unscale_and_snap_to_nearest_valid(x, params, self.searchspace, self.tuning_options.eps)
+                if new_params:
+                    params = new_params
+                    legal = True
+                    x_int = ",".join([str(i) for i in params])
+
             if not legal:
                 result = params_dict
                 result[self.tuning_options.objective] = util.InvalidConfig()
@@ -243,3 +253,28 @@ def scale_from_params(params, tune_params, eps):
     for i, v in enumerate(tune_params.values()):
         x[i] = 0.5 * eps + v.index(params[i])*eps
     return x
+
+
+
+def unscale_and_snap_to_nearest_valid(x, params, searchspace, eps):
+    """Helper func to snap to the nearest valid configuration"""
+
+    # params is nearest unscaled point, but is not valid
+    neighbors = get_neighbors(params, searchspace)
+
+    if neighbors:
+        # sort on distance to x
+        neighbors.sort(key=lambda y: distance.euclidean(x, scale_from_params(y, searchspace.tune_params, eps)))
+
+        # return closest valid neighbor
+        return neighbors[0]
+
+    return []
+
+
+def get_neighbors(params, searchspace):
+    for neighbor_method in ["strictly-adjacent", "adjacent", "Hamming"]:
+        neighbors = searchspace.get_neighbors_no_cache(tuple(params), neighbor_method=neighbor_method)
+        if len(neighbors) > 0:
+            return neighbors
+    return []
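
For intuition, the repair step in unscale_and_snap_to_nearest_valid can be illustrated with a small self-contained sketch: given the optimizer's scaled position x and a set of valid neighbor configurations, it picks the neighbor whose scaled coordinates are closest to x. The parameter space, eps, and candidate list below are toy values chosen for this illustration, not taken from the commit.

from scipy.spatial import distance

def scale_from_params(params, tune_params, eps):
    # same mapping as in common.py: place each parameter value at the centre of its interval in the scaled space
    x = [0.0] * len(params)
    for i, v in enumerate(tune_params.values()):
        x[i] = 0.5 * eps + v.index(params[i]) * eps
    return x

# toy search space and step size (made up for illustration)
tune_params = {"block_size_x": [16, 32, 64, 128], "tile_size": [1, 2, 4, 8]}
eps = 0.25

x = [0.30, 0.55]                 # scaled position proposed by the optimizer
candidates = [[32, 2], [64, 8]]  # pretend these are the valid neighbors returned by get_neighbors

# sort valid candidates on Euclidean distance to x in the scaled space
candidates.sort(key=lambda y: distance.euclidean(x, scale_from_params(y, tune_params, eps)))
print(candidates[0])             # [32, 2], the configuration used to repair x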

kernel_tuner/strategies/firefly_algorithm.py

Lines changed: 7 additions & 5 deletions
@@ -13,7 +13,8 @@
                 maxiter=("Maximum number of iterations", 100),
                 B0=("Maximum attractiveness", 1.0),
                 gamma=("Light absorption coefficient", 1.0),
-                alpha=("Randomization parameter", 0.2))
+                alpha=("Randomization parameter", 0.2),
+                constraint_aware=("constraint-aware optimization (True/False)", True))
 
 def tune(searchspace: Searchspace, runner, tuning_options):
 
@@ -23,7 +24,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     # using this instead of get_bounds because scaling is used
     bounds, _, eps = cost_func.get_bounds_x0_eps()
 
-    num_particles, maxiter, B0, gamma, alpha = common.get_options(tuning_options.strategy_options, _options)
+    num_particles, maxiter, B0, gamma, alpha, constraint_aware = common.get_options(tuning_options.strategy_options, _options)
 
     best_score_global = sys.float_info.max
     best_position_global = []
@@ -34,9 +35,10 @@ def tune(searchspace: Searchspace, runner, tuning_options):
         swarm.append(Firefly(bounds))
 
     # ensure particles start from legal points
-    population = list(list(p) for p in searchspace.get_random_sample(num_particles))
-    for i, particle in enumerate(swarm):
-        particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
+    if constraint_aware:
+        population = list(list(p) for p in searchspace.get_random_sample(num_particles))
+        for i, particle in enumerate(swarm):
+            particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
 
     # compute initial intensities
     for j in range(num_particles):
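
Since the new option is read from strategy_options like any other strategy parameter, enabling or disabling it from user code is a one-liner. The sketch below is a hypothetical usage example: the kernel, tuning parameters, and restriction are placeholders, and only the strategy and strategy_options arguments relate to this commit.

import numpy as np
import kernel_tuner

kernel_string = """
__global__ void vector_add(float *c, const float *a, const float *b, int n) {
    int i = blockIdx.x * block_size_x + threadIdx.x;
    if (i < n) {
        c[i] = a[i] + b[i];
    }
}
"""

n = 10_000_000
a = np.random.randn(n).astype(np.float32)
b = np.random.randn(n).astype(np.float32)
c = np.zeros_like(a)
args = [c, a, b, np.int32(n)]

# tile_size is only here to give the restriction something to constrain
tune_params = {"block_size_x": [32, 64, 128, 256, 512, 1024], "tile_size": [1, 2, 4, 8]}
restrictions = ["block_size_x * tile_size <= 2048"]

# firefly defaults to constraint_aware=True; passing False falls back to
# reporting restricted configurations as invalid instead of repairing them
results, env = kernel_tuner.tune_kernel(
    "vector_add", kernel_string, n, args, tune_params,
    restrictions=restrictions,
    strategy="firefly_algorithm",
    strategy_options={"constraint_aware": True},
)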

kernel_tuner/strategies/pso.py

Lines changed: 7 additions & 5 deletions
@@ -13,7 +13,8 @@
                 maxiter=("Maximum number of iterations", 100),
                 w=("Inertia weight constant", 0.5),
                 c1=("Cognitive constant", 2.0),
-                c2=("Social constant", 1.0))
+                c2=("Social constant", 1.0),
+                constraint_aware=("constraint-aware optimization (True/False)", False))
 
 def tune(searchspace: Searchspace, runner, tuning_options):
 
@@ -24,7 +25,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     bounds, _, eps = cost_func.get_bounds_x0_eps()
 
 
-    num_particles, maxiter, w, c1, c2 = common.get_options(tuning_options.strategy_options, _options)
+    num_particles, maxiter, w, c1, c2, constraint_aware = common.get_options(tuning_options.strategy_options, _options)
 
     best_score_global = sys.float_info.max
     best_position_global = []
@@ -35,9 +36,10 @@ def tune(searchspace: Searchspace, runner, tuning_options):
         swarm.append(Particle(bounds))
 
     # ensure particles start from legal points
-    population = list(list(p) for p in searchspace.get_random_sample(num_particles))
-    for i, particle in enumerate(swarm):
-        particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
+    if constraint_aware:
+        population = list(list(p) for p in searchspace.get_random_sample(num_particles))
+        for i, particle in enumerate(swarm):
+            particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
 
     # start optimization
     for i in range(maxiter):
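
Both tune() functions above now unpack one extra value from common.get_options. Judging from these call sites, get_options resolves the user-supplied strategy_options against the strategy's _options dict in declaration order, falling back to the documented default when an option is not given. The sketch below is an assumption based on the call sites, not the actual implementation of get_options.

def get_options_sketch(strategy_options, options):
    # one value per declared option, in declaration order, default when not supplied
    return [strategy_options.get(name, default) for name, (_, default) in options.items()]

_options = dict(c2=("Social constant", 1.0),
                constraint_aware=("constraint-aware optimization (True/False)", False))

# the user only overrides constraint_aware; c2 keeps its default
print(get_options_sketch({"constraint_aware": True}, _options))  # [1.0, True]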

kernel_tuner/strategies/simulated_annealing.py

Lines changed: 54 additions & 15 deletions
@@ -10,16 +10,17 @@
 from kernel_tuner.strategies.common import CostFunc
 
 _options = dict(T=("Starting temperature", 1.0),
-        T_min=("End temperature", 0.001),
-        alpha=("Alpha parameter", 0.995),
-        maxiter=("Number of iterations within each annealing step", 1))
+                T_min=("End temperature", 0.001),
+                alpha=("Alpha parameter", 0.995),
+                maxiter=("Number of iterations within each annealing step", 1),
+                constraint_aware=("constraint-aware optimization (True/False)", True))
 
 def tune(searchspace: Searchspace, runner, tuning_options):
     # SA works with real parameter values and does not need scaling
     cost_func = CostFunc(searchspace, tuning_options, runner)
 
     # optimization parameters
-    T, T_min, alpha, niter = common.get_options(tuning_options.strategy_options, _options)
+    T, T_min, alpha, niter, constraint_aware = common.get_options(tuning_options.strategy_options, _options)
     T_start = T
 
     # compute how many iterations would be needed to complete the annealing schedule
@@ -30,7 +31,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
     max_feval = tuning_options.strategy_options.get("max_fevals", max_iter)
 
     # get random starting point and evaluate cost
-    pos = list(searchspace.get_random_sample(1)[0])
+    pos = generate_starting_point(searchspace, constraint_aware)
     old_cost = cost_func(pos, check_restrictions=False)
 
     # main optimization loop
@@ -46,9 +47,9 @@ def tune(searchspace: Searchspace, runner, tuning_options):
 
         for _ in range(niter):
 
-            new_pos = neighbor(pos, searchspace)
+            new_pos = neighbor(pos, searchspace, constraint_aware)
             try:
-                new_cost = cost_func(new_pos, check_restrictions=False)
+                new_cost = cost_func(new_pos, check_restrictions=not constraint_aware)
             except util.StopCriterionReached as e:
                 if tuning_options.verbose:
                     print(e)
@@ -73,7 +74,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
            stuck = 0
         c_old = c
         if stuck > 100:
-            pos = list(searchspace.get_random_sample(1)[0])
+            pos = generate_starting_point(searchspace, constraint_aware)
             stuck = 0
 
     # safeguard
@@ -103,11 +104,49 @@ def acceptance_prob(old_cost, new_cost, T, tuning_options):
     return np.exp(((old_cost-new_cost)/old_cost)/T)
 
 
-def neighbor(pos, searchspace: Searchspace):
+def neighbor(pos, searchspace: Searchspace, constraint_aware=True):
     """Return a random neighbor of pos."""
-    # Note: this is not the same as the previous implementation, because it is possible that non-edge parameters remain the same, but suggested configurations will all be within restrictions
-    neighbors = searchspace.get_neighbors(tuple(pos), neighbor_method='Hamming') if random.random() < 0.2 else searchspace.get_neighbors(tuple(pos), neighbor_method='strictly-adjacent')
-    if len(neighbors) > 0:
-        return list(random.choice(neighbors))
-    # if there are no neighbors, return a random configuration
-    return list(searchspace.get_random_sample(1)[0])
+
+    if constraint_aware:
+        # Note: this is not the same as the previous implementation, because it is possible that non-edge parameters remain the same, but suggested configurations will all be within restrictions
+        neighbors = searchspace.get_neighbors(tuple(pos), neighbor_method='Hamming') if random.random() < 0.2 else searchspace.get_neighbors(tuple(pos), neighbor_method='strictly-adjacent')
+        if len(neighbors) > 0:
+            return list(random.choice(neighbors))
+        # if there are no neighbors, return a random configuration
+        return list(searchspace.get_random_sample(1)[0])
+
+    else:
+        tune_params = searchspace.tune_params
+        size = len(pos)
+        pos_out = []
+        # random mutation
+        # every dimension attempts a mutation: replace with a random value (p=0.2) or step to an adjacent value
+        for i in range(size):
+            key = list(tune_params.keys())[i]
+            values = tune_params[key]
+
+            if random.random() < 0.2:  # replace with random value
+                new_value = random_val(i, tune_params)
+            else:  # adjacent value
+                ind = values.index(pos[i])
+                if random.random() > 0.5:
+                    ind += 1
+                else:
+                    ind -= 1
+                ind = min(max(ind, 0), len(values)-1)
+                new_value = values[ind]
+
+            pos_out.append(new_value)
+        return pos_out
+
+def random_val(index, tune_params):
+    """Return a random value for a parameter."""
+    key = list(tune_params.keys())[index]
+    return random.choice(tune_params[key])
+
+def generate_starting_point(searchspace: Searchspace, constraint_aware=True):
+    if constraint_aware:
+        return list(searchspace.get_random_sample(1)[0])
+    else:
+        tune_params = searchspace.tune_params
+        return [random_val(i, tune_params) for i in range(len(tune_params))]
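
To make the non-constraint-aware branch of neighbor() concrete, here is a standalone toy version of the same per-dimension mutation. The parameter values are made up; note that the result may violate restrictions, which is exactly why cost_func is now called with check_restrictions=not constraint_aware.

import random

def mutate(pos, tune_params):
    # per dimension: with probability 0.2 replace with a random value,
    # otherwise step to an adjacent value, clamped at the ends of the list
    out = []
    for i, values in enumerate(tune_params.values()):
        if random.random() < 0.2:
            out.append(random.choice(values))
        else:
            ind = values.index(pos[i]) + (1 if random.random() > 0.5 else -1)
            out.append(values[min(max(ind, 0), len(values) - 1)])
    return out

# toy parameter space (made up for illustration)
tune_params = {"block_size_x": [32, 64, 128, 256], "tile_size": [1, 2, 4]}
print(mutate([64, 2], tune_params))  # e.g. [128, 2] or [64, 1]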
