
Commit ea7a69d

LHS sampling, enforce trial population diversity, avoid getting stuck
1 parent 962e5f9 commit ea7a69d

File tree

1 file changed: +64, -19 lines

kernel_tuner/strategies/diff_evo.py

Lines changed: 64 additions & 19 deletions
@@ -3,6 +3,8 @@
 import re
 import numpy as np
 
+from scipy.stats.qmc import LatinHypercube
+
 from kernel_tuner import util
 from kernel_tuner.searchspace import Searchspace
 from kernel_tuner.strategies import common
@@ -11,7 +13,7 @@
 _options = dict(
     popsize=("population size", 50),
     maxiter=("maximum number of generations", 200),
-    F=("mutation factor (differential weight)", 0.8),
+    F=("mutation factor (differential weight)", 1.3),
     CR=("crossover rate", 0.9),
     method=("method", "best1bin"),
     constraint_aware=("constraint-aware optimization (True/False)", True),
@@ -35,7 +37,7 @@
 
 def tune(searchspace: Searchspace, runner, tuning_options):
     cost_func = CostFunc(searchspace, tuning_options, runner)
-    bounds, x0, _ = cost_func.get_bounds_x0_eps()
+    bounds = cost_func.get_bounds()
 
     options = tuning_options.strategy_options
     popsize, maxiter, F, CR, method, constraint_aware = common.get_options(options, _options)
@@ -97,6 +99,22 @@ def random_draw(idxs, mutate, best):
     return np.random.choice(idxs, draw, replace=draw >= len(idxs))
 
 
+def generate_population(tune_params, min_idx, max_idx, popsize, searchspace, constraint_aware):
+    if constraint_aware:
+        samples = LatinHypercube(len(tune_params)).integers(l_bounds=0, u_bounds=max_idx, n=popsize, endpoint=True)
+        population = [indices_to_values(sample, tune_params) for sample in samples]
+        population = np.array([repair(individual, searchspace) for individual in population])
+    else:
+        population = []
+        for _ in range(popsize):
+            ind = []
+            for key in tune_params:
+                ind.append(random.choice(tune_params[key]))
+            population.append(ind)
+        population = np.array(population)
+    return population
+
+
 def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F, CR, method, constraint_aware, verbose):
     """
     A basic implementation of the Differential Evolution algorithm.
@@ -115,18 +133,9 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
     bounds = np.array(bounds)
 
     # Initialize the population with random individuals within the bounds
-    if constraint_aware:
-        population = np.array(list(list(p) for p in searchspace.get_random_sample(popsize)))
-    else:
-        population = []
-        dna_size = len(self.tune_params)
-        for _ in range(self.pop_size):
-            dna = []
-            for key in self.tune_params:
-                dna.append(random.choice(self.tune_params[key]))
-            population.append(dna)
-        population = np.array(population)
+    population = generate_population(tune_params, min_idx, max_idx, popsize, searchspace, constraint_aware)
 
+    # Override with user-specified starting position
     population[0] = cost_func.get_start_pos()
 
     # Calculate the initial cost for each individual in the population
@@ -140,16 +149,25 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
 
     # --- 2. Main Loop ---
 
+    stabilized = 0
+
     # Iterate through the specified number of generations
     for generation in range(maxiter):
 
+        # Trial population and vectors are stored as lists
+        # not Numpy arrays, to make it easy to check for duplicates
         trial_population = []
 
+        # If for two generations there has been no change, generate a new population
+        if stabilized > 2:
+            trial_population = list(generate_population(tune_params, min_idx, max_idx, popsize, searchspace, constraint_aware))
+
         # Iterate over each individual in the population
-        for i in range(popsize):
+        i = 0
+        stuck = 0
+        while len(trial_population) < popsize:
 
             # --- a. Mutation ---
-
             # Select three distinct random individuals (a, b, c) from the population,
             # ensuring they are different from the current individual 'i'.
             idxs = [idx for idx in range(popsize) if idx != i]
@@ -172,13 +190,28 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
             trial_vector = repair(trial_vector, searchspace)
 
             # Store for selection
-            trial_population.append(trial_vector)
+            if list(trial_vector) not in trial_population:
+                trial_population.append(list(trial_vector))
+                i += 1
+                stuck = 0
+            else:
+                stuck += 1
+                if stuck >= 20:
+                    if verbose:
+                        print(f"Differential Evolution got stuck generating new individuals, insert random sample")
+                    trial_population.append(list(searchspace.get_random_sample(1)[0]))
+                    i += 1
+                    stuck = 0
+
 
         # --- c. Selection ---
 
         # Calculate the cost of the new trial vectors
         trial_population_cost = np.array([cost_func(ind) for ind in trial_population])
 
+        # Keep track of whether population changes over time
+        no_change = True
+
         # Iterate over each individual in the trial population
         for i in range(popsize):
 
@@ -188,19 +221,31 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
             # If the trial vector has a lower or equal cost, it replaces the
             # target vector in the population for the next generation.
             if trial_cost <= population_cost[i]:
-                population[i] = trial_vector
-                population_cost[i] = trial_cost
+
+                # check if trial_vector is not already in population
+                idxs = [idx for idx in range(popsize) if idx != i]
+                if trial_vector not in population[idxs]:
+                    population[i] = np.array(trial_vector)
+                    population_cost[i] = trial_cost
+                    no_change = False
 
                 # Update the overall best solution if the new one is better
                 if trial_cost < best_cost:
                     best_cost = trial_cost
                     best_solution = trial_vector
                     best_solution_idx = values_to_indices(best_solution, tune_params)
 
+        # Note if population is stabilizing
+        if no_change:
+            stabilized += 1
+
         # Print the progress at the end of the generation
         if verbose:
             print(f"Generation {generation + 1}, Best Cost: {best_cost:.6f}")
 
+    if verbose:
+        print(f"Differential Evolution completed fevals={len(cost_func.tuning_options.unique_results)}")
+
     return {"solution": best_solution, "cost": best_cost}
 
 
@@ -348,7 +393,7 @@ def repair(trial_vector, searchspace):
     # if we have found valid neighboring configurations, select one at random
     if len(neighbors) > 0:
        new_trial_vector = np.array(list(random.choice(neighbors)))
-        print(f"Differential evolution resulted in invalid config {trial_vector=}, repaired dna to {new_trial_vector=}")
+        print(f"Differential evolution resulted in invalid config {trial_vector=}, repaired to {new_trial_vector=}")
        return new_trial_vector
 
    return trial_vector
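
Editor's note on the LHS initialization: the new generate_population draws integer index vectors from SciPy's Latin Hypercube engine and maps them back to parameter values. Below is a minimal standalone sketch of that idea, assuming SciPy >= 1.9 (for QMCEngine.integers) and using hypothetical toy tune_params in place of a real Kernel Tuner search space; the repair step against the constrained search space is omitted here.

import numpy as np
from scipy.stats.qmc import LatinHypercube

# Toy tuning parameters (hypothetical stand-ins for a real search space)
tune_params = {
    "block_size_x": [32, 64, 128, 256],
    "tile_size": [1, 2, 4],
    "use_shmem": [0, 1],
}
popsize = 6

# One LHS dimension per tunable parameter; u_bounds holds the largest valid
# index for each parameter, and endpoint=True makes that bound inclusive.
max_idx = [len(values) - 1 for values in tune_params.values()]
sampler = LatinHypercube(d=len(tune_params))
samples = sampler.integers(l_bounds=0, u_bounds=max_idx, n=popsize, endpoint=True)

# Map each vector of indices back to parameter values, analogous to
# indices_to_values in the strategy.
all_values = list(tune_params.values())
population = np.array([[all_values[d][i] for d, i in enumerate(sample)] for sample in samples])
print(population)

Compared with plain uniform sampling, the Latin Hypercube design spreads the initial indices across each parameter's range, which is the point of switching the constraint-aware initialization to it.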
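Editor's note on the diversity enforcement: the trial loop only accepts a candidate that is not already in the trial population, and after a run of consecutive rejections it falls back to a random draw so the loop cannot stall. A self-contained sketch of that pattern follows, with hypothetical propose() and random_candidate() callables standing in for the strategy's mutation/crossover step and searchspace.get_random_sample.

import random

def fill_unique(popsize, propose, random_candidate, max_stuck=20, verbose=False):
    """Build a list of popsize unique candidates.

    propose(i) yields the next trial vector for slot i (e.g. mutation + crossover);
    random_candidate() is the fallback used after max_stuck consecutive duplicates.
    Both are hypothetical callables, not part of Kernel Tuner's API.
    """
    trial_population = []
    i = 0
    stuck = 0
    while len(trial_population) < popsize:
        candidate = list(propose(i))
        if candidate not in trial_population:
            trial_population.append(candidate)
            i += 1
            stuck = 0
        else:
            stuck += 1
            if stuck >= max_stuck:
                if verbose:
                    print("stuck generating new individuals, inserting a random sample")
                trial_population.append(list(random_candidate()))
                i += 1
                stuck = 0
    return trial_population

# Tiny usage example on a discrete toy space
space = [[a, b] for a in range(3) for b in range(3)]
print(fill_unique(5, propose=lambda i: random.choice(space),
                  random_candidate=lambda: random.choice(space)))

As in the commit, the random fallback is inserted without a duplicate check, which guarantees the while loop makes progress even when the mutation operator keeps proposing configurations that are already present.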
