10
10
from kernel_tuner .strategies .common import CostFunc
11
11
12
12
_options = dict (T = ("Starting temperature" , 1.0 ),
13
- T_min = ("End temperature" , 0.001 ),
14
- alpha = ("Alpha parameter" , 0.995 ),
15
- maxiter = ("Number of iterations within each annealing step" , 1 ))
13
+ T_min = ("End temperature" , 0.001 ),
14
+ alpha = ("Alpha parameter" , 0.995 ),
15
+ maxiter = ("Number of iterations within each annealing step" , 1 ),
16
+ constraint_aware = ("constraint-aware optimization (True/False)" , True ))
16
17
17
18
def tune (searchspace : Searchspace , runner , tuning_options ):
18
19
# SA works with real parameter values and does not need scaling
19
20
cost_func = CostFunc (searchspace , tuning_options , runner )
20
21
21
22
# optimization parameters
22
- T , T_min , alpha , niter = common .get_options (tuning_options .strategy_options , _options )
23
+ T , T_min , alpha , niter , constraint_aware = common .get_options (tuning_options .strategy_options , _options )
23
24
T_start = T
24
25
25
26
# compute how many iterations would be needed to complete the annealing schedule
@@ -30,7 +31,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
30
31
max_feval = tuning_options .strategy_options .get ("max_fevals" , max_iter )
31
32
32
33
# get random starting point and evaluate cost
33
- pos = list (searchspace . get_random_sample ( 1 )[ 0 ] )
34
+ pos = generate_starting_point (searchspace , constraint_aware )
34
35
old_cost = cost_func (pos , check_restrictions = False )
35
36
36
37
# main optimization loop
@@ -46,9 +47,9 @@ def tune(searchspace: Searchspace, runner, tuning_options):
46
47
47
48
for _ in range (niter ):
48
49
49
- new_pos = neighbor (pos , searchspace )
50
+ new_pos = neighbor (pos , searchspace , constraint_aware )
50
51
try :
51
- new_cost = cost_func (new_pos , check_restrictions = False )
52
+ new_cost = cost_func (new_pos , check_restrictions = not constraint_aware )
52
53
except util .StopCriterionReached as e :
53
54
if tuning_options .verbose :
54
55
print (e )
@@ -73,7 +74,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
73
74
stuck = 0
74
75
c_old = c
75
76
if stuck > 100 :
76
- pos = list (searchspace . get_random_sample ( 1 )[ 0 ] )
77
+ pos = generate_starting_point (searchspace , constraint_aware )
77
78
stuck = 0
78
79
79
80
# safeguard
@@ -103,11 +104,49 @@ def acceptance_prob(old_cost, new_cost, T, tuning_options):
103
104
return np .exp (((old_cost - new_cost )/ old_cost )/ T )
104
105
105
106
106
def neighbor(pos, searchspace: Searchspace, constraint_aware=True):
    """Return a random neighbor of pos.

    :param pos: current configuration as a list of parameter values.
    :param searchspace: Searchspace object describing the tunable parameters.
    :param constraint_aware: when True, neighbors are drawn from the
        restriction-respecting neighborhood of the search space; when False,
        each dimension is mutated independently without checking restrictions.
    :returns: a new configuration as a list of parameter values.
    """
    if constraint_aware:
        # Note: this is not the same as the previous implementation, because it
        # is possible that non-edge parameters remain the same, but suggested
        # configurations will all be within restrictions
        if random.random() < 0.2:
            neighbors = searchspace.get_neighbors(tuple(pos), neighbor_method='Hamming')
        else:
            neighbors = searchspace.get_neighbors(tuple(pos), neighbor_method='strictly-adjacent')
        if neighbors:
            return list(random.choice(neighbors))
        # if there are no neighbors, return a random configuration
        return list(searchspace.get_random_sample(1)[0])

    # constraint-oblivious random mutation: every dimension is considered,
    # so in expectation all dimensions attempt to mutate
    pos_out = []
    for i, values in enumerate(searchspace.tune_params.values()):
        if random.random() < 0.2:
            # replace with a uniformly random value for this parameter
            new_value = random.choice(values)
        else:
            # step to an adjacent value; clamping means the value may stay
            # the same when pos[i] is already at an edge of its range
            ind = values.index(pos[i])
            ind += 1 if random.random() > 0.5 else -1
            new_value = values[min(max(ind, 0), len(values) - 1)]
        pos_out.append(new_value)
    return pos_out
141
+
142
def random_val(index, tune_params):
    """Return a uniformly random value for the parameter at position *index*."""
    values = list(tune_params.values())[index]
    return random.choice(values)
146
+
147
def generate_starting_point(searchspace: Searchspace, constraint_aware=True):
    """Return a random starting configuration as a list of parameter values.

    When constraint_aware, the point is sampled from the restricted search
    space; otherwise every parameter value is drawn independently at random.
    """
    if constraint_aware:
        sample = searchspace.get_random_sample(1)[0]
        return list(sample)
    return [random.choice(values) for values in searchspace.tune_params.values()]
0 commit comments