4
4
import numpy as np
5
5
6
6
from kernel_tuner .util import StopCriterionReached
7
+ from scipy .stats .qmc import LatinHypercube
7
8
from kernel_tuner .searchspace import Searchspace
8
9
from kernel_tuner .strategies import common
9
10
from kernel_tuner .strategies .common import CostFunc
10
11
11
12
_options = dict (
12
13
popsize = ("population size" , 50 ),
13
14
maxiter = ("maximum number of generations" , 200 ),
14
- F = ("mutation factor (differential weight)" , 0.8 ),
15
+ F = ("mutation factor (differential weight)" , 1.3 ),
15
16
CR = ("crossover rate" , 0.9 ),
16
17
method = ("method" , "best1bin" ),
17
18
constraint_aware = ("constraint-aware optimization (True/False)" , True ),
35
36
36
37
def tune (searchspace : Searchspace , runner , tuning_options ):
37
38
cost_func = CostFunc (searchspace , tuning_options , runner )
38
- bounds , x0 , _ = cost_func .get_bounds_x0_eps ()
39
+ bounds = cost_func .get_bounds ()
39
40
40
41
options = tuning_options .strategy_options
41
42
popsize , maxiter , F , CR , method , constraint_aware = common .get_options (options , _options )
@@ -97,6 +98,23 @@ def random_draw(idxs, mutate, best):
97
98
return np .random .choice (idxs , draw , replace = draw >= len (idxs ))
98
99
99
100
101
def generate_population(tune_params, max_idx, popsize, searchspace, constraint_aware):
    """Generate a new population of candidate configurations.

    :param tune_params: mapping from tunable parameter names to the list of
        allowed values for that parameter (iteration order defines the DNA layout)
    :param max_idx: per-parameter maximum value index, used as the inclusive
        upper bounds of the Latin Hypercube draw (only used when constraint_aware)
    :param popsize: number of individuals to generate
    :param searchspace: Searchspace used to repair invalid configurations
        (only used when constraint_aware)
    :param constraint_aware: if True, sample parameter indices with a Latin
        Hypercube and repair invalid individuals against the searchspace;
        if False, draw every parameter value independently and uniformly at random
    :returns: Numpy array of shape (popsize, number of tunable parameters)
    """
    if constraint_aware:
        # Latin Hypercube sampling spreads the initial population evenly across
        # the index space of each parameter; endpoint=True makes u_bounds inclusive
        samples = LatinHypercube(len(tune_params)).integers(l_bounds=0, u_bounds=max_idx, n=popsize, endpoint=True)
        population = [indices_to_values(sample, tune_params) for sample in samples]
        # repair any sampled individuals that violate the searchspace constraints
        population = np.array([repair(individual, searchspace) for individual in population])
    else:
        # unconstrained: uniform random choice per parameter for each individual
        # (comprehension replaces the original manual append loops)
        population = np.array([[random.choice(values) for values in tune_params.values()] for _ in range(popsize)])
    return population
116
+
117
+
100
118
def differential_evolution (searchspace , cost_func , bounds , popsize , maxiter , F , CR , method , constraint_aware , verbose ):
101
119
"""
102
120
A basic implementation of the Differential Evolution algorithm.
@@ -115,18 +133,9 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
115
133
bounds = np .array (bounds )
116
134
117
135
# Initialize the population with random individuals within the bounds
118
- if constraint_aware :
119
- population = np .array (list (list (p ) for p in searchspace .get_random_sample (popsize )))
120
- else :
121
- population = []
122
- dna_size = len (tune_params )
123
- for _ in range (pop_size ):
124
- dna = []
125
- for key in tune_params :
126
- dna .append (random .choice (tune_params [key ]))
127
- population .append (dna )
128
- population = np .array (population )
136
+ population = generate_population (tune_params , max_idx , popsize , searchspace , constraint_aware )
129
137
138
+ # Override with user-specified starting position
130
139
population [0 ] = cost_func .get_start_pos ()
131
140
132
141
# Calculate the initial cost for each individual in the population
@@ -140,16 +149,25 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
140
149
141
150
# --- 2. Main Loop ---
142
151
152
+ stabilized = 0
153
+
143
154
# Iterate through the specified number of generations
144
155
for generation in range (maxiter ):
145
156
157
+ # Trial population and vectors are stored as lists
158
+ # not Numpy arrays, to make it easy to check for duplicates
146
159
trial_population = []
147
160
161
+ # If for two generations there has been no change, generate a new population
162
+ if stabilized > 2 :
163
+ trial_population = list (generate_population (tune_params , max_idx , popsize , searchspace , constraint_aware ))
164
+
148
165
# Iterate over each individual in the population
149
- for i in range (popsize ):
166
+ i = 0
167
+ stuck = 0
168
+ while len (trial_population ) < popsize :
150
169
151
170
# --- a. Mutation ---
152
-
153
171
# Select three distinct random individuals (a, b, c) from the population,
154
172
# ensuring they are different from the current individual 'i'.
155
173
idxs = [idx for idx in range (popsize ) if idx != i ]
@@ -171,14 +189,30 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
171
189
if constraint_aware :
172
190
trial_vector = repair (trial_vector , searchspace )
173
191
174
- # Store for selection
175
- trial_population .append (trial_vector )
192
+ # Store for selection, if not in trial_population already
193
+ if list (trial_vector ) not in trial_population :
194
+ trial_population .append (list (trial_vector ))
195
+ i += 1
196
+ stuck = 0
197
+ else :
198
+ stuck += 1
199
+
200
+ if stuck >= 20 :
201
+ if verbose :
202
+ print ("Differential Evolution got stuck generating new individuals, insert random sample" )
203
+ trial_population .append (list (searchspace .get_random_sample (1 )[0 ]))
204
+ i += 1
205
+ stuck = 0
206
+
176
207
177
208
# --- c. Selection ---
178
209
179
210
# Calculate the cost of the new trial vectors
180
211
trial_population_cost = np .array ([cost_func (ind ) for ind in trial_population ])
181
212
213
+ # Keep track of whether population changes over time
214
+ no_change = True
215
+
182
216
# Iterate over each individual in the trial population
183
217
for i in range (popsize ):
184
218
@@ -188,19 +222,31 @@ def differential_evolution(searchspace, cost_func, bounds, popsize, maxiter, F,
188
222
# If the trial vector has a lower or equal cost, it replaces the
189
223
# target vector in the population for the next generation.
190
224
if trial_cost <= population_cost [i ]:
191
- population [i ] = trial_vector
192
- population_cost [i ] = trial_cost
225
+
226
+ # check if trial_vector is not already in population
227
+ idxs = [idx for idx in range (popsize ) if idx != i ]
228
+ if trial_vector not in population [idxs ]:
229
+ population [i ] = np .array (trial_vector )
230
+ population_cost [i ] = trial_cost
231
+ no_change = False
193
232
194
233
# Update the overall best solution if the new one is better
195
234
if trial_cost < best_cost :
196
235
best_cost = trial_cost
197
236
best_solution = trial_vector
198
237
best_solution_idx = values_to_indices (best_solution , tune_params )
199
238
239
+ # Note if population is stabilizing
240
+ if no_change :
241
+ stabilized += 1
242
+
200
243
# Print the progress at the end of the generation
201
244
if verbose :
202
245
print (f"Generation { generation + 1 } , Best Cost: { best_cost :.6f} " )
203
246
247
+ if verbose :
248
+ print (f"Differential Evolution completed fevals={ len (cost_func .tuning_options .unique_results )} " )
249
+
204
250
return {"solution" : best_solution , "cost" : best_cost }
205
251
206
252
@@ -348,7 +394,7 @@ def repair(trial_vector, searchspace):
348
394
# if we have found valid neighboring configurations, select one at random
349
395
if len (neighbors ) > 0 :
350
396
new_trial_vector = np .array (list (random .choice (neighbors )))
351
- print (f"Differential evolution resulted in invalid config { trial_vector = } , repaired dna to { new_trial_vector = } " )
397
+ print (f"Differential evolution resulted in invalid config { trial_vector = } , repaired to { new_trial_vector = } " )
352
398
return new_trial_vector
353
399
354
400
return trial_vector
0 commit comments