Skip to content

Commit 42040d8

Browse files
optimize_bacterialrods: use bayes_opt package to optimize parameters of ABM simulations (starting with 1 parameter)
1 parent 887612f commit 42040d8

File tree

2 files changed

+113
-0
lines changed

2 files changed

+113
-0
lines changed

cr_bayesian_optim/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,5 +29,6 @@
2929
import cr_bayesian_optim.sim_branching as sim_branching
3030
import cr_bayesian_optim.plotting as plotting
3131
import cr_bayesian_optim.optimization as optimization
32+
from cr_bayesian_optim.optimize_bacterialrods import *
3233

3334
from .fractal_dim import fractal_dim_main
Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
import cr_mech_coli as crm
2+
import cr_mech_coli.crm_fit as crm_fit
3+
4+
import numpy as np
5+
import matplotlib.pyplot as plt
6+
7+
from bayes_opt import BayesianOptimization, acquisition
8+
9+
10+
def extract_data(image_timesteps, n_vertices,
                 mask_path_template="data/crm_fit/0001/masks/image0010{im}-markers.csv"):
    """Load segmentation masks and extract rod positions for each timestep.

    Parameters
    ----------
    image_timesteps : list[str]
        Timestep identifiers; used both to locate the mask CSV files and,
        converted to float, as the iteration values of the data.
    n_vertices : int
        Number of vertices per rod passed to ``crm.extract_positions``.
    mask_path_template : str, optional
        Format string with an ``{im}`` placeholder locating each mask file.
        Defaults to the previous hard-coded path, so existing callers are
        unaffected.

    Returns
    -------
    list
        ``[iterations, positions]`` as numpy arrays, both with the first
        timestep dropped (presumably reserved as the initial condition for
        the simulation — TODO confirm against ``cost``/``main`` usage).
    """
    masks = [
        np.loadtxt(mask_path_template.format(im=im), delimiter=",").T
        for im in image_timesteps
    ]
    pos_data = [crm.extract_positions(mask, n_vertices)[0] for mask in masks]
    iterations_data = [float(im) for im in image_timesteps]
    return [np.array(iterations_data)[1:], np.array(pos_data)[1:]]
17+
18+
19+
def cost(data, settings, init_pos, *param):
    """Mean squared deviation between observed and simulated rod positions.

    Parameters
    ----------
    data : sequence
        ``(days, x_target)`` — observed timestep values and the target
        position array they correspond to (as produced by ``extract_data``).
    settings :
        Simulation settings object passed through to ``crm_fit.predict``.
    init_pos :
        Initial rod positions for the simulation.
    *param :
        The sampled parameter values being optimized.

    Returns
    -------
    float
        Mean of the elementwise squared differences.

    Raises
    ------
    RuntimeError
        If ``crm_fit.predict`` returns ``None`` (failed simulation).
        Previously this printed a message and called ``exit()``; raising
        lets callers decide how to handle the failure.
    """
    (days, x_target) = data
    container = crm_fit.predict(param, init_pos, settings)
    if container is None:
        raise RuntimeError("Simulation Failed")
    iterations = container.get_all_iterations()
    x_prediction = np.zeros(np.shape(x_target))
    # Average spacing between consecutive saved iterations.
    delta_iter = np.mean(np.array(iterations)[1:] - np.array(iterations)[:-1])
    # TODO why is saved iterations step changes from 952 to 953 ??
    iter_data = delta_iter * (np.array(days) - days[0] + 1)
    # Index of the saved iteration closest to the last data timestep.
    ind_last = np.argmin(np.abs(iter_data[-1] - iterations))
    i = 0
    # `it` instead of `iter` — avoids shadowing the builtin.
    for it in iterations[:ind_last + 1]:
        # Only compare at iterations that (almost) match a data timestep.
        if np.any(np.abs(iter_data - it) <= 1.):
            cells = container.get_cells_at_iteration(it)
            keys = sorted(cells.keys())
            # NOTE(review): last axis is sliced from 3 to 2 components —
            # presumably dropping a z coordinate; confirm.
            pos = np.array([cells[key][0].pos for key in keys])[:, :, :-1]
            x_prediction[i] = pos
            i += 1
    return np.mean(squared_difference(x_target, x_prediction))
41+
42+
43+
def squared_difference(x_target, x_prediction):
    """Return the elementwise squared error between target and prediction."""
    residual = x_target - x_prediction
    return residual * residual
45+
46+
47+
def posterior(optimizer, grid):
    """Evaluate the optimizer's Gaussian-process surrogate on ``grid``.

    Returns the predictive mean and standard deviation.
    """
    # NOTE: reaches into bayes_opt's private ``_gp`` attribute.
    return optimizer._gp.predict(grid, return_std=True)
50+
51+
52+
def plot_objective_GP(optimizer, bnds, name=''):
    """Save one plot per parameter of the GP posterior mean +/- one std.

    Each figure shows the surrogate over that parameter's bounds plus the
    already-sampled points, and is written to ``'<key>_<name>.png'``.
    """
    for key, bounds in bnds.items():
        grid = np.linspace(*bounds, 100)
        mean_gp, sigma_gp = posterior(optimizer, grid.reshape(-1, 1))
        fig, ax = plt.subplots()
        ax.plot(grid, mean_gp, label=key)
        ax.fill_between(grid, mean_gp + sigma_gp, mean_gp - sigma_gp, alpha=0.1)
        # NOTE(review): flattening all sampled params assumes a 1-D search
        # space — confirm before optimizing more than one parameter.
        ax.scatter(optimizer.space.params.flatten(), optimizer.space.target,
                   c="red", s=50, zorder=10)
        ax.legend(fontsize=12)
        plt.savefig(f'{key}_{name}' + '.png', bbox_inches='tight')
        plt.close(fig)
63+
64+
65+
66+
def optimize_bacterialrods_main():
    """Fit ABM bacterial-rod simulation parameters to mask data via Bayesian optimization."""
    n_vertices = 8
    # Extract data from masks which have been previously generated.
    image_timesteps = ['42', '43', '44', '45', '46', '47', '48', '49', '52']
    data = extract_data(image_timesteps, n_vertices)

    # Define settings required to run the simulation.
    settings = crm_fit.Settings.from_toml("data/crm_fit/0001/settings.toml")
    settings.constants.n_vertices = n_vertices
    settings.constants.n_saves = 15
    settings.others = crm_fit.Others(True)

    # Fixed (non-optimized) parameters of the mechanical model.
    settings.parameters.damping = 2.0
    settings.parameters.potential_type.Mie.en = 10.
    settings.parameters.potential_type.Mie.em = 1.5
    lower, upper, x0, param_infos, constants, constant_infos = \
        settings.generate_optimization_infos(len(data[1][0]))
    print(param_infos)

    # Cost as a function of only the sampled parameter; the first position
    # snapshot of the data serves as the initial condition.
    def cost_for_optimization(Strength):
        return cost(data, settings, data[1][0], Strength)

    n_iter = 20
    acq = acquisition.ExpectedImprovement(1.)
    # Bounds as (lower, upper) keyed by parameter name.  The previous
    # unpacking named these `u_b, l_b` although zip yields (lower, upper);
    # the produced tuples were correct, only the names were swapped.
    bnds = {info[0]: (lo, hi) for lo, hi, info in zip(lower, upper, param_infos)}
    optimizer = BayesianOptimization(
        f=None,
        acquisition_function=acq,
        pbounds=bnds,
        verbose=2,
        random_state=17695,
    )
    for j in range(n_iter):
        next_params = optimizer.suggest()
        target = cost_for_optimization(**next_params)
        # bayes_opt *maximizes* the registered target; the cost is a mean
        # squared error that must be *minimized*, so register its negation.
        optimizer.register(
            params=next_params,
            target=-target,
        )
        plot_objective_GP(optimizer, bnds, name=f'EI_{j}')


if __name__ == "__main__":
    optimize_bacterialrods_main()

0 commit comments

Comments
 (0)