3
3
import re
4
4
import numpy as np
5
5
6
+ from scipy .stats .qmc import LatinHypercube
7
+
6
8
from kernel_tuner import util
7
9
from kernel_tuner .searchspace import Searchspace
8
10
from kernel_tuner .strategies import common
11
13
_options = dict (
12
14
popsize = ("population size" , 50 ),
13
15
maxiter = ("maximum number of generations" , 200 ),
14
- F = ("mutation factor (differential weight)" , 0.8 ),
16
+ F = ("mutation factor (differential weight)" , 1.3 ),
15
17
CR = ("crossover rate" , 0.9 ),
16
18
method = ("method" , "best1bin" ),
17
19
constraint_aware = ("constraint-aware optimization (True/False)" , True ),
35
37
36
38
def tune (searchspace : Searchspace , runner , tuning_options ):
37
39
cost_func = CostFunc (searchspace , tuning_options , runner )
38
- bounds , x0 , _ = cost_func .get_bounds_x0_eps ()
40
+ bounds = cost_func .get_bounds ()
39
41
40
42
options = tuning_options .strategy_options
41
43
popsize , maxiter , F , CR , method , constraint_aware = common .get_options (options , _options )
@@ -97,6 +99,22 @@ def random_draw(idxs, mutate, best):
97
99
return np .random .choice (idxs , draw , replace = draw >= len (idxs ))
98
100
99
101
102
def generate_population(tune_params, min_idx, max_idx, popsize, searchspace, constraint_aware):
    """Create an initial population of ``popsize`` configurations.

    When ``constraint_aware`` is True, index vectors are drawn with Latin
    Hypercube sampling (for even coverage of the index space), mapped to
    parameter values, and repaired so each individual satisfies the
    search-space constraints. Otherwise each parameter value is chosen
    uniformly at random, ignoring constraints.

    :param tune_params: dict mapping parameter names to lists of allowed values.
    :param min_idx: per-dimension lower index bounds for the Latin Hypercube
        sample (array-like, or None to use 0). Previously this argument was
        accepted but ignored; it is now honored.
    :param max_idx: per-dimension upper index bounds (inclusive).
    :param popsize: number of individuals to generate.
    :param searchspace: Searchspace used to repair invalid configurations
        (only consulted when ``constraint_aware`` is True).
    :param constraint_aware: whether to sample with constraint repair.
    :returns: numpy array of shape (popsize, len(tune_params)) holding
        parameter values.
    """
    if constraint_aware:
        # Bug fix: min_idx was silently ignored (l_bounds was hard-coded to 0).
        # Honor the caller-supplied lower bounds; fall back to 0 when absent,
        # which reproduces the old behavior for the common all-zeros case.
        l_bounds = 0 if min_idx is None else min_idx
        samples = LatinHypercube(len(tune_params)).integers(
            l_bounds=l_bounds, u_bounds=max_idx, n=popsize, endpoint=True
        )
        population = [indices_to_values(sample, tune_params) for sample in samples]
        return np.array([repair(individual, searchspace) for individual in population])
    # Constraint-oblivious fallback: pick every parameter value uniformly at
    # random, one draw per parameter per individual (same draw order as before).
    return np.array(
        [[random.choice(tune_params[key]) for key in tune_params] for _ in range(popsize)]
    )
116
+
117
+
100
118
def differential_evolution (searchspace , cost_func , bounds , popsize , maxiter , F , CR , method , constraint_aware , verbose ):
101
119
"""
102
120
A basic implementation of the Differential Evolution algorithm.
@@ -115,18 +133,9 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
115
133
bounds = np .array (bounds )
116
134
117
135
# Initialize the population with random individuals within the bounds
118
- if constraint_aware :
119
- population = np .array (list (list (p ) for p in searchspace .get_random_sample (popsize )))
120
- else :
121
- population = []
122
- dna_size = len (self .tune_params )
123
- for _ in range (self .pop_size ):
124
- dna = []
125
- for key in self .tune_params :
126
- dna .append (random .choice (self .tune_params [key ]))
127
- population .append (dna )
128
- population = np .array (population )
136
+ population = generate_population (tune_params , min_idx , max_idx , popsize , searchspace , constraint_aware )
129
137
138
+ # Override with user-specified starting position
130
139
population [0 ] = cost_func .get_start_pos ()
131
140
132
141
# Calculate the initial cost for each individual in the population
@@ -140,16 +149,25 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
140
149
141
150
# --- 2. Main Loop ---
142
151
152
+ stabilized = 0
153
+
143
154
# Iterate through the specified number of generations
144
155
for generation in range (maxiter ):
145
156
157
+ # Trial population and vectors are stored as lists
158
+ # not Numpy arrays, to make it easy to check for duplicates
146
159
trial_population = []
147
160
161
+ # If for two generations there has been no change, generate a new population
162
+ if stabilized > 2 :
163
+ trial_population = list (generate_population (tune_params , min_idx , max_idx , popsize , searchspace , constraint_aware ))
164
+
148
165
# Iterate over each individual in the population
149
- for i in range (popsize ):
166
+ i = 0
167
+ stuck = 0
168
+ while len (trial_population ) < popsize :
150
169
151
170
# --- a. Mutation ---
152
-
153
171
# Select three distinct random individuals (a, b, c) from the population,
154
172
# ensuring they are different from the current individual 'i'.
155
173
idxs = [idx for idx in range (popsize ) if idx != i ]
@@ -172,13 +190,28 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
172
190
trial_vector = repair (trial_vector , searchspace )
173
191
174
192
# Store for selection
175
- trial_population .append (trial_vector )
193
+ if list (trial_vector ) not in trial_population :
194
+ trial_population .append (list (trial_vector ))
195
+ i += 1
196
+ stuck = 0
197
+ else :
198
+ stuck += 1
199
+ if stuck >= 20 :
200
+ if verbose :
201
+ print (f"Differential Evolution got stuck generating new individuals, insert random sample" )
202
+ trial_population .append (list (searchspace .get_random_sample (1 )[0 ]))
203
+ i += 1
204
+ stuck = 0
205
+
176
206
177
207
# --- c. Selection ---
178
208
179
209
# Calculate the cost of the new trial vectors
180
210
trial_population_cost = np .array ([cost_func (ind ) for ind in trial_population ])
181
211
212
+ # Keep track of whether population changes over time
213
+ no_change = True
214
+
182
215
# Iterate over each individual in the trial population
183
216
for i in range (popsize ):
184
217
@@ -188,19 +221,31 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
188
221
# If the trial vector has a lower or equal cost, it replaces the
189
222
# target vector in the population for the next generation.
190
223
if trial_cost <= population_cost [i ]:
191
- population [i ] = trial_vector
192
- population_cost [i ] = trial_cost
224
+
225
+ # check if trial_vector is not already in population
226
+ idxs = [idx for idx in range (popsize ) if idx != i ]
227
+ if trial_vector not in population [idxs ]:
228
+ population [i ] = np .array (trial_vector )
229
+ population_cost [i ] = trial_cost
230
+ no_change = False
193
231
194
232
# Update the overall best solution if the new one is better
195
233
if trial_cost < best_cost :
196
234
best_cost = trial_cost
197
235
best_solution = trial_vector
198
236
best_solution_idx = values_to_indices (best_solution , tune_params )
199
237
238
+ # Note if population is stabilizing
239
+ if no_change :
240
+ stabilized += 1
241
+
200
242
# Print the progress at the end of the generation
201
243
if verbose :
202
244
print (f"Generation { generation + 1 } , Best Cost: { best_cost :.6f} " )
203
245
246
+ if verbose :
247
+ print (f"Differential Evolution completed fevals={ len (cost_func .tuning_options .unique_results )} " )
248
+
204
249
return {"solution" : best_solution , "cost" : best_cost }
205
250
206
251
@@ -348,7 +393,7 @@ def repair(trial_vector, searchspace):
348
393
# if we have found valid neighboring configurations, select one at random
349
394
if len (neighbors ) > 0 :
350
395
new_trial_vector = np .array (list (random .choice (neighbors )))
351
- print (f"Differential evolution resulted in invalid config { trial_vector = } , repaired dna to { new_trial_vector = } " )
396
+ print (f"Differential evolution resulted in invalid config { trial_vector = } , repaired to { new_trial_vector = } " )
352
397
return new_trial_vector
353
398
354
399
return trial_vector
0 commit comments