Constrained optimization #298

Draft: wants to merge 5 commits into master, showing changes from all commits
37 changes: 36 additions & 1 deletion kernel_tuner/strategies/common.py
@@ -3,6 +3,7 @@
from time import perf_counter

import numpy as np
from scipy.spatial import distance

from kernel_tuner import util
from kernel_tuner.searchspace import Searchspace
@@ -88,8 +89,17 @@ def __call__(self, x, check_restrictions=True):

# else check if this is a legal (non-restricted) configuration
if check_restrictions and self.searchspace.restrictions:
legal = self.searchspace.is_param_config_valid(tuple(params))
params_dict = dict(zip(self.searchspace.tune_params.keys(), params))
legal = util.check_restrictions(self.searchspace.restrictions, params_dict, self.tuning_options.verbose)

if "constraint_aware" in self.tuning_options.strategy_options and self.tuning_options.strategy_options["constraint_aware"]:
# attempt to repair
new_params = unscale_and_snap_to_nearest_valid(x, params, self.searchspace, self.tuning_options.eps)
if new_params:
params = new_params
legal = True
x_int = ",".join([str(i) for i in params])

if not legal:
result = params_dict
result[self.tuning_options.objective] = util.InvalidConfig()
@@ -243,3 +253,28 @@ def scale_from_params(params, tune_params, eps):
for i, v in enumerate(tune_params.values()):
x[i] = 0.5 * eps + v.index(params[i])*eps
return x



def unscale_and_snap_to_nearest_valid(x, params, searchspace, eps):
"""Helper func to snap to the nearest valid configuration"""

# params is nearest unscaled point, but is not valid
neighbors = get_neighbors(params, searchspace)

if neighbors:
# sort on distance to x
neighbors.sort(key=lambda y: distance.euclidean(x, scale_from_params(y, searchspace.tune_params, eps)))

# return closest valid neighbor
return neighbors[0]

return []


def get_neighbors(params, searchspace):
"""Return the neighbors of params, relaxing the neighbor method until any are found."""
for neighbor_method in ["strictly-adjacent", "adjacent", "Hamming"]:
neighbors = searchspace.get_neighbors_no_cache(tuple(params), neighbor_method=neighbor_method)
if len(neighbors) > 0:
return neighbors
return []
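To make the repair step concrete, here is a minimal self-contained sketch of the same idea in plain Python. The two-parameter space and the validity rule are invented for illustration, and the brute-force Hamming enumeration stands in for the Searchspace neighbor methods used above.

from math import dist  # Euclidean distance, like scipy.spatial.distance.euclidean

tune_params = {"block_size_x": [16, 32, 64, 128], "vector_size": [1, 2, 4]}

def is_valid(cfg):
    # stand-in for the restrictions check / searchspace.is_param_config_valid
    return cfg[0] * cfg[1] <= 128

def scale_from_params(params, tune_params, eps):
    # same bin-center formula as in common.py above
    return [0.5 * eps + v.index(params[i]) * eps for i, v in enumerate(tune_params.values())]

def snap_to_nearest_valid(x, params, eps):
    # enumerate Hamming neighbors (one parameter changed), keep the valid ones,
    # and return the neighbor whose scaled position is closest to the optimizer's x
    neighbors = []
    for i, values in enumerate(tune_params.values()):
        for value in values:
            candidate = list(params)
            candidate[i] = value
            if candidate != list(params) and is_valid(candidate):
                neighbors.append(candidate)
    if not neighbors:
        return []
    return min(neighbors, key=lambda y: dist(x, scale_from_params(y, tune_params, eps)))

# (128, 2) is invalid here (128 * 2 > 128); the repair snaps to a nearby valid config
print(snap_to_nearest_valid([0.875, 0.5], [128, 2], eps=0.25))  # -> [64, 2]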
12 changes: 7 additions & 5 deletions kernel_tuner/strategies/firefly_algorithm.py
@@ -13,7 +13,8 @@
maxiter=("Maximum number of iterations", 100),
B0=("Maximum attractiveness", 1.0),
gamma=("Light absorption coefficient", 1.0),
alpha=("Randomization parameter", 0.2))
alpha=("Randomization parameter", 0.2),
constraint_aware=("constraint-aware optimization (True/False)", True))

def tune(searchspace: Searchspace, runner, tuning_options):

@@ -23,7 +24,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
# using this instead of get_bounds because scaling is used
bounds, _, eps = cost_func.get_bounds_x0_eps()

num_particles, maxiter, B0, gamma, alpha = common.get_options(tuning_options.strategy_options, _options)
num_particles, maxiter, B0, gamma, alpha, constraint_aware = common.get_options(tuning_options.strategy_options, _options)

best_score_global = sys.float_info.max
best_position_global = []
@@ -34,9 +35,10 @@ def tune(searchspace: Searchspace, runner, tuning_options):
swarm.append(Firefly(bounds))

# ensure particles start from legal points
population = list(list(p) for p in searchspace.get_random_sample(num_particles))
for i, particle in enumerate(swarm):
particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
if constraint_aware:
population = list(list(p) for p in searchspace.get_random_sample(num_particles))
for i, particle in enumerate(swarm):
particle.position = scale_from_params(population[i], searchspace.tune_params, eps)

# compute initial intensities
for j in range(num_particles):
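The constraint-aware seeding above relies on scale_from_params to place each sampled valid configuration at the center of its index bin in the scaled search space. A tiny worked check of that mapping (the three-valued parameter and eps = 1/3 are arbitrary; the formula mirrors common.py):

import math

def scale_from_params(params, tune_params, eps):
    # bin-center mapping, as in kernel_tuner.strategies.common
    return [0.5 * eps + v.index(params[i]) * eps for i, v in enumerate(tune_params.values())]

tune_params = {"block_size_x": [16, 32, 64]}
x = scale_from_params([32], tune_params, eps=1 / 3)
assert math.isclose(x[0], 0.5)  # the middle value lands mid-interval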
171 changes: 118 additions & 53 deletions kernel_tuner/strategies/genetic_algorithm.py
@@ -11,6 +11,7 @@
_options = dict(
popsize=("population size", 20),
maxiter=("maximum number of generations", 100),
constraint_aware=("constraint-aware optimization (True/False)", True),
method=("crossover method to use, choose any from single_point, two_point, uniform, disruptive_uniform", "uniform"),
mutation_chance=("chance to mutate is 1 in mutation_chance", 10),
)
@@ -19,21 +20,23 @@
def tune(searchspace: Searchspace, runner, tuning_options):

options = tuning_options.strategy_options
pop_size, generations, method, mutation_chance = common.get_options(options, _options)
crossover = supported_methods[method]
pop_size, generations, constraint_aware, method, mutation_chance = common.get_options(options, _options)

GA = GeneticAlgorithm(pop_size, searchspace, constraint_aware, method, mutation_chance)

best_score = 1e20
cost_func = CostFunc(searchspace, tuning_options, runner)

population = list(list(p) for p in searchspace.get_random_sample(pop_size))
population = GA.generate_population()

for generation in range(generations):

# determine fitness of population members
weighted_population = []
for dna in population:
try:
time = cost_func(dna, check_restrictions=False)
# if we are not constraint-aware we should check restrictions upon evaluation
time = cost_func(dna, check_restrictions=not constraint_aware)
except util.StopCriterionReached as e:
if tuning_options.verbose:
print(e)
@@ -51,18 +54,19 @@ def tune(searchspace: Searchspace, runner, tuning_options):
if tuning_options.verbose:
print("Generation %d, best_score %f" % (generation, best_score))

# build new population for next generation
population = []

# crossover and mutate
while len(population) < pop_size:
dna1, dna2 = weighted_choice(weighted_population, 2)
dna1, dna2 = GA.weighted_choice(weighted_population, 2)

children = crossover(dna1, dna2)
children = GA.crossover(dna1, dna2)

for child in children:
child = mutate(child, mutation_chance, searchspace)
child = GA.mutate(child)

if child not in population and searchspace.is_param_config_valid(tuple(child)):
if child not in population:
population.append(child)

if len(population) >= pop_size:
@@ -75,57 +79,117 @@

tune.__doc__ = common.get_strategy_docstring("Genetic Algorithm", _options)

class GeneticAlgorithm:

def __init__(self, pop_size, searchspace, constraint_aware=False, method="uniform", mutation_chance=10):
self.pop_size = pop_size
self.searchspace = searchspace
self.tune_params = searchspace.tune_params.copy()
self.constraint_aware = constraint_aware
self.crossover_method = supported_methods[method]
self.mutation_chance = mutation_chance

def weighted_choice(population, n):
"""Randomly select n unique individuals from a weighted population, fitness determines probability of being selected."""

def random_index_betavariate(pop_size):
# has a higher probability of returning index of item at the head of the list
alpha = 1
beta = 2.5
return int(random.betavariate(alpha, beta) * pop_size)

def random_index_weighted(pop_size):
"""Use weights to increase probability of selection."""
weights = [w for _, w in population]
# invert because lower is better
inverted_weights = [1.0 / w for w in weights]
prefix_sum = np.cumsum(inverted_weights)
total_weight = sum(inverted_weights)
randf = random.random() * total_weight
# return first index of prefix_sum larger than random number
return next(i for i, v in enumerate(prefix_sum) if v > randf)

random_index = random_index_betavariate

indices = [random_index(len(population)) for _ in range(n)]
chosen = []
for ind in indices:
while ind in chosen:
ind = random_index(len(population))
chosen.append(ind)

return [population[ind][0] for ind in chosen]


def mutate(dna, mutation_chance, searchspace: Searchspace, cache=True):
"""Mutate DNA with 1/mutation_chance chance."""
# this is actually a neighbors problem with Hamming distance, choose randomly from returned searchspace list
if int(random.random() * mutation_chance) == 0:
if cache:
neighbors = searchspace.get_neighbors(tuple(dna), neighbor_method="Hamming")
else:
neighbors = searchspace.get_neighbors_no_cache(tuple(dna), neighbor_method="Hamming")
if len(neighbors) > 0:
return list(random.choice(neighbors))
return dna

def generate_population(self):
"""Constraint-aware population creation method."""
if self.constraint_aware:
pop = list(list(p) for p in self.searchspace.get_random_sample(self.pop_size))
else:
pop = []
for _ in range(self.pop_size):
dna = []
for key in self.tune_params:
dna.append(random.choice(self.tune_params[key]))
pop.append(dna)
return pop

def crossover(self, dna1, dna2):
""" Apply selected crossover method, repair dna if constraint-aware """
dna1, dna2 = self.crossover_method(dna1, dna2)
if self.constraint_aware:
return self.repair(dna1), self.repair(dna2)
return dna1, dna2

def weighted_choice(self, population, n):
"""Randomly select n unique individuals from a weighted population, fitness determines probability of being selected."""

def random_index_betavariate(pop_size):
# has a higher probability of returning index of item at the head of the list
alpha = 1
beta = 2.5
return int(random.betavariate(alpha, beta) * pop_size)

def random_index_weighted(pop_size):
"""Use weights to increase probability of selection."""
weights = [w for _, w in population]
# invert because lower is better
inverted_weights = [1.0 / w for w in weights]
prefix_sum = np.cumsum(inverted_weights)
total_weight = sum(inverted_weights)
randf = random.random() * total_weight
# return first index of prefix_sum larger than random number
return next(i for i, v in enumerate(prefix_sum) if v > randf)

random_index = random_index_betavariate

indices = [random_index(len(population)) for _ in range(n)]
chosen = []
for ind in indices:
while ind in chosen:
ind = random_index(len(population))
chosen.append(ind)

return [population[ind][0] for ind in chosen]


def mutate(self, dna, cache=False):
"""Mutate DNA with 1/mutation_chance chance."""
# this is actually a neighbors problem with Hamming distance, choose randomly from returned searchspace list
if int(random.random() * self.mutation_chance) == 0:
if self.constraint_aware:
if cache:
neighbors = self.searchspace.get_neighbors(tuple(dna), neighbor_method="Hamming")
else:
neighbors = self.searchspace.get_neighbors_no_cache(tuple(dna), neighbor_method="Hamming")
if len(neighbors) > 0:
return list(random.choice(neighbors))
else:
# select a tunable parameter at random
mutate_index = random.randint(0, len(self.tune_params)-1)
mutate_key = list(self.tune_params.keys())[mutate_index]
# get all possible values for this parameter and remove current value
new_val_options = self.tune_params[mutate_key].copy()
new_val_options.remove(dna[mutate_index])
# pick new value at random
if len(new_val_options) > 0:
new_val = random.choice(new_val_options)
dna[mutate_index] = new_val
return dna


def repair(self, dna):
""" It is possible that crossover methods yield a configuration that is not valid. """
if not self.searchspace.is_param_config_valid(tuple(dna)):
# dna is not valid, try to repair it
# search for valid configurations neighboring this config
# start from strictly-adjacent to increasingly allowing more neighbors
for neighbor_method in ["strictly-adjacent", "adjacent", "Hamming"]:
neighbors = self.searchspace.get_neighbors_no_cache(tuple(dna), neighbor_method=neighbor_method)

# if we have found valid neighboring configurations, select one at random
if len(neighbors) > 0:
new_dna = list(random.choice(neighbors))
print(f"GA crossover resulted in invalid config {dna=}, repaired dna to {new_dna=}")
return new_dna

return dna


def single_point_crossover(dna1, dna2):
"""Crossover dna1 and dna2 at a random index."""
# check if you can do the crossovers using the neighbor index: check which valid parameter configuration is closest to the crossover, probably best to use "adjacent" as it is least strict?
pos = int(random.random() * (len(dna1)))
return (dna1[:pos] + dna2[pos:], dna2[:pos] + dna1[pos:])
return dna1[:pos] + dna2[pos:], dna2[:pos] + dna1[pos:]


def two_point_crossover(dna1, dna2):
@@ -137,7 +201,7 @@ def two_point_crossover(dna1, dna2):
pos1, pos2 = sorted(random.sample(list(range(start, end)), 2))
child1 = dna1[:pos1] + dna2[pos1:pos2] + dna1[pos2:]
child2 = dna2[:pos1] + dna1[pos1:pos2] + dna2[pos2:]
return (child1, child2)
return child1, child2


def uniform_crossover(dna1, dna2):
@@ -168,7 +232,7 @@ def disruptive_uniform_crossover(dna1, dna2):
child1[ind] = dna2[ind]
child2[ind] = dna1[ind]
swaps += 1
return (child1, child2)
return child1, child2


supported_methods = {
@@ -177,3 +241,4 @@ def disruptive_uniform_crossover(dna1, dna2):
"uniform": uniform_crossover,
"disruptive_uniform": disruptive_uniform_crossover,
}

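As a usage note, constraint-aware behavior is exposed as a strategy option. A hedged sketch of toggling it from tune_kernel (the kernel, sizes, and tune_params below are invented for illustration; the option names come from the _options dict above):

import numpy as np
import kernel_tuner

kernel_string = """
__global__ void vector_add(float *c, float *a, float *b, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        c[i] = a[i] + b[i];
    }
}
"""

size = 10000000
a = np.random.randn(size).astype(np.float32)
b = np.random.randn(size).astype(np.float32)
c = np.zeros_like(a)

tune_params = {"block_size_x": [32, 64, 128, 256, 512, 1024]}

# constraint_aware=True (the default here) repairs invalid offspring via neighbor
# search; constraint_aware=False instead checks restrictions at evaluation time,
# matching the modified cost_func call in tune() above
results, env = kernel_tuner.tune_kernel(
    "vector_add", kernel_string, size, [c, a, b, np.int32(size)], tune_params,
    strategy="genetic_algorithm",
    strategy_options={"popsize": 20, "maxiter": 100, "constraint_aware": True},
)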
9 changes: 7 additions & 2 deletions kernel_tuner/strategies/greedy_ils.py
@@ -1,9 +1,9 @@
"""A simple greedy iterative local search algorithm for parameter search."""
import random
from kernel_tuner import util
from kernel_tuner.searchspace import Searchspace
from kernel_tuner.strategies import common
from kernel_tuner.strategies.common import CostFunc
from kernel_tuner.strategies.genetic_algorithm import mutate
from kernel_tuner.strategies.hillclimbers import base_hillclimb

_options = dict(neighbor=("Method for selecting neighboring nodes, choose from Hamming or adjacent", "Hamming"),
@@ -58,9 +58,14 @@ def tune(searchspace: Searchspace, runner, tuning_options):

tune.__doc__ = common.get_strategy_docstring("Greedy Iterative Local Search (ILS)", _options)

def mutate(indiv, searchspace: Searchspace):
"""Return a random Hamming neighbor of indiv, or indiv itself if it has none."""
neighbors = searchspace.get_neighbors_no_cache(tuple(indiv), neighbor_method="Hamming")
if not neighbors:
return list(indiv)
return list(random.choice(neighbors))


def random_walk(indiv, permutation_size, no_improve, last_improve, searchspace: Searchspace):
if last_improve >= no_improve:
return searchspace.get_random_sample(1)[0]
for _ in range(permutation_size):
indiv = mutate(indiv, 0, searchspace, cache=False)
indiv = mutate(indiv, searchspace)
return indiv
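A hedged usage sketch for this strategy, reusing the names from the genetic-algorithm example above (only neighbor is taken from the _options listed here; all other options keep their defaults):

results, env = kernel_tuner.tune_kernel(
    "vector_add", kernel_string, size, [c, a, b, np.int32(size)], tune_params,
    strategy="greedy_ils",
    strategy_options={"neighbor": "Hamming"},
)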
12 changes: 7 additions & 5 deletions kernel_tuner/strategies/pso.py
@@ -13,7 +13,8 @@
maxiter=("Maximum number of iterations", 100),
w=("Inertia weight constant", 0.5),
c1=("Cognitive constant", 2.0),
c2=("Social constant", 1.0))
c2=("Social constant", 1.0),
constraint_aware=("constraint-aware optimization (True/False)", False))

def tune(searchspace: Searchspace, runner, tuning_options):

@@ -24,7 +25,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
bounds, _, eps = cost_func.get_bounds_x0_eps()


num_particles, maxiter, w, c1, c2 = common.get_options(tuning_options.strategy_options, _options)
num_particles, maxiter, w, c1, c2, constraint_aware = common.get_options(tuning_options.strategy_options, _options)

best_score_global = sys.float_info.max
best_position_global = []
@@ -35,9 +36,10 @@ def tune(searchspace: Searchspace, runner, tuning_options):
swarm.append(Particle(bounds))

# ensure particles start from legal points
population = list(list(p) for p in searchspace.get_random_sample(num_particles))
for i, particle in enumerate(swarm):
particle.position = scale_from_params(population[i], searchspace.tune_params, eps)
if constraint_aware:
population = list(list(p) for p in searchspace.get_random_sample(num_particles))
for i, particle in enumerate(swarm):
particle.position = scale_from_params(population[i], searchspace.tune_params, eps)

# start optimization
for i in range(maxiter):
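Note that constraint_aware defaults to False for PSO in this diff, unlike the firefly and genetic-algorithm strategies where it defaults to True, so here the repair behavior is opt-in. A hedged sketch, again reusing the names from the genetic-algorithm example:

results, env = kernel_tuner.tune_kernel(
    "vector_add", kernel_string, size, [c, a, b, np.int32(size)], tune_params,
    strategy="pso",
    strategy_options={"constraint_aware": True, "maxiter": 100},
)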