Skip to content

Commit 2ca5901

Browse files
author
Serin Lee
committed
updating cluster module and ACOM to use cluster dataset for strain mapping
1 parent e3e918a commit 2ca5901

File tree

2 files changed

+125
-95
lines changed

2 files changed

+125
-95
lines changed

py4DSTEM/process/diffraction/crystal_ACOM.py

Lines changed: 58 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -2212,8 +2212,6 @@ def calculate_strain(
22122212
deformation tensor which transforms the simulated diffraction pattern
22132213
into the experimental pattern, for all probe positions.
22142214
2215-
TODO: add robust fitting?
2216-
22172215
Parameters
22182216
----------
22192217
bragg_peaks_array (PointListArray):
@@ -2334,71 +2332,72 @@ def calculate_strain(
23342332
inds_match[a0] = ind_min
23352333
keep[a0] = True
23362334

2337-
# Get all paired peaks
2338-
qxy = np.vstack((p.data["qx"][keep], p.data["qy"][keep])).T
2339-
qxy_ref = np.vstack(
2340-
(p_ref.data["qx"][inds_match[keep]], p_ref.data["qy"][inds_match[keep]])
2341-
).T
2335+
if np.sum(keep) >= min_num_peaks:
2336+
# Get all paired peaks
2337+
qxy = np.vstack((p.data["qx"][keep], p.data["qy"][keep])).T
2338+
qxy_ref = np.vstack(
2339+
(p_ref.data["qx"][inds_match[keep]], p_ref.data["qy"][inds_match[keep]])
2340+
).T
23422341

2343-
# Fit transformation matrix
2344-
# Note - not sure about transpose here
2345-
# (though it might not matter if rotation isn't included)
2346-
if intensity_weighting:
2347-
weights = np.sqrt(p.data["intensity"][keep, None]) * 0 + 1
2348-
m = lstsq(
2349-
qxy_ref * weights,
2350-
qxy * weights,
2351-
rcond=None,
2352-
)[0].T
2353-
else:
2354-
m = lstsq(
2355-
qxy_ref,
2356-
qxy,
2357-
rcond=None,
2358-
)[0].T
2359-
2360-
# Robust fitting
2361-
if robust:
2362-
for a0 in range(5):
2363-
# calculate new weights
2364-
qxy_fit = qxy_ref @ m
2365-
diff2 = np.sum((qxy_fit - qxy) ** 2, axis=1)
2366-
2367-
weights = np.exp(
2368-
diff2 / ((-2 * robust_thresh**2) * np.median(diff2))
2369-
)[:, None]
2370-
if intensity_weighting:
2371-
weights *= np.sqrt(p.data["intensity"][keep, None])
2372-
2373-
# calculate new fits
2342+
# Fit transformation matrix
2343+
# Note - not sure about transpose here
2344+
# (though it might not matter if rotation isn't included)
2345+
if intensity_weighting:
2346+
weights = np.sqrt(p.data["intensity"][keep, None]) * 0 + 1
23742347
m = lstsq(
23752348
qxy_ref * weights,
23762349
qxy * weights,
23772350
rcond=None,
23782351
)[0].T
2352+
else:
2353+
m = lstsq(
2354+
qxy_ref,
2355+
qxy,
2356+
rcond=None,
2357+
)[0].T
23792358

2380-
# Set values into the infinitesimal strain matrix
2381-
strain_map.get_slice("e_xx").data[rx, ry] = 1 - m[0, 0]
2382-
strain_map.get_slice("e_yy").data[rx, ry] = 1 - m[1, 1]
2383-
strain_map.get_slice("e_xy").data[rx, ry] = -(m[0, 1] + m[1, 0]) / 2.0
2384-
strain_map.get_slice("theta").data[rx, ry] = (m[0, 1] - m[1, 0]) / 2.0
2385-
2386-
# Add finite rotation from ACOM orientation map.
2387-
# I am not sure about the relative signs here.
2388-
# Also, maybe I need to add in the mirror operator?
2389-
if orientation_map.mirror[rx, ry, 0]:
2390-
strain_map.get_slice("theta").data[rx, ry] += (
2391-
orientation_map.angles[rx, ry, 0, 0]
2392-
+ orientation_map.angles[rx, ry, 0, 2]
2393-
)
2394-
else:
2395-
strain_map.get_slice("theta").data[rx, ry] -= (
2396-
orientation_map.angles[rx, ry, 0, 0]
2397-
+ orientation_map.angles[rx, ry, 0, 2]
2398-
)
2359+
# Robust fitting
2360+
if robust:
2361+
for a0 in range(5):
2362+
# calculate new weights
2363+
qxy_fit = qxy_ref @ m
2364+
diff2 = np.sum((qxy_fit - qxy) ** 2, axis=1)
2365+
2366+
weights = np.exp(
2367+
diff2 / ((-2 * robust_thresh**2) * np.median(diff2))
2368+
)[:, None]
2369+
if intensity_weighting:
2370+
weights *= np.sqrt(p.data["intensity"][keep, None])
2371+
2372+
# calculate new fits
2373+
m = lstsq(
2374+
qxy_ref * weights,
2375+
qxy * weights,
2376+
rcond=None,
2377+
)[0].T
2378+
2379+
# Set values into the infinitesimal strain matrix
2380+
strain_map.get_slice("e_xx").data[rx, ry] = 1 - m[0, 0]
2381+
strain_map.get_slice("e_yy").data[rx, ry] = 1 - m[1, 1]
2382+
strain_map.get_slice("e_xy").data[rx, ry] = -(m[0, 1] + m[1, 0]) / 2.0
2383+
strain_map.get_slice("theta").data[rx, ry] = (m[0, 1] - m[1, 0]) / 2.0
2384+
2385+
# Add finite rotation from ACOM orientation map.
2386+
# I am not sure about the relative signs here.
2387+
# Also, maybe I need to add in the mirror operator?
2388+
if orientation_map.mirror[rx, ry, 0]:
2389+
strain_map.get_slice("theta").data[rx, ry] += (
2390+
orientation_map.angles[rx, ry, 0, 0]
2391+
+ orientation_map.angles[rx, ry, 0, 2]
2392+
)
2393+
else:
2394+
strain_map.get_slice("theta").data[rx, ry] -= (
2395+
orientation_map.angles[rx, ry, 0, 0]
2396+
+ orientation_map.angles[rx, ry, 0, 2]
2397+
)
23992398

2400-
else:
2401-
strain_map.get_slice("mask").data[rx, ry] = 0.0
2399+
else:
2400+
strain_map.get_slice("mask").data[rx, ry] = 0.0
24022401

24032402
if rotation_range is not None:
24042403
strain_map.get_slice("theta").data[:] = np.mod(

py4DSTEM/process/utils/cluster.py

Lines changed: 67 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -15,22 +15,49 @@ class Cluster:
1515
def __init__(
1616
self,
1717
datacube,
18+
r_space_mask,
1819
):
1920
"""
2021
Args:
21-
datacube (py4DSTEM.DataCube): 4D-STEM data
22-
22+
datacube (py4DSTEM.DataCube): 4D-STEM data
23+
r_space_mask (np.ndarray): Mask in real space to apply background thresholding on the similarity array.
2324
2425
"""
25-
2626
self.datacube = datacube
27+
self.r_space_mask = r_space_mask
28+
self.similarity = None
29+
self.similarity_raw = None
30+
31+
def bg_thresholding(self, r_space_mask,):
32+
self.r_space_mask = np.asarray(r_space_mask)
33+
34+
# if similarity is already computed, apply the thresholding
35+
if self.similarity_raw is not None:
36+
self.similarity = self._apply_bg_mask(self.similarity_raw)
37+
38+
def _apply_bg_mask(self, similarity):
39+
if self.r_space_mask is None:
40+
return similarity
41+
return similarity * self.r_space_mask[..., None]
42+
2743

2844
def find_similarity(
2945
self,
30-
mask=None, # by default
46+
q_space_mask=None,
3147
smooth_sigma = 0,
48+
return_similarity = False
3249
):
33-
# Which neighbors to search
50+
51+
"""
52+
Args:
53+
q_space_mask : annular boolean q_space_mask to apply on the diffraction patterns
54+
smooth_sigma : sigma for Gaussian smoothing of the diffraction patterns before calculating similarity
55+
return_similarity : if True, return the similarity array
56+
"""
57+
if self.r_space_mask is None:
58+
self.set_mask(r_space_mask)
59+
60+
# List of neighbors to search
3461
# (-1,-1) will be equivalent to (1,1)
3562
self.dxy = np.array(
3663
(
@@ -61,8 +88,8 @@ def find_similarity(
6188
if smooth_sigma > 0:
6289
diff_ref = gaussian_filter(diff_ref,smooth_sigma)
6390

64-
if mask is not None:
65-
diff_ref = diff_ref[mask]
91+
if q_space_mask is not None:
92+
diff_ref = diff_ref[q_space_mask]
6693

6794
norm_diff_ref = np.sqrt(np.sum(diff_ref * diff_ref))
6895
# diff_ref_mean = np.mean(diff_ref)
@@ -83,18 +110,23 @@ def find_similarity(
83110
if smooth_sigma > 0:
84111
diff = gaussian_filter(diff,smooth_sigma)
85112

86-
if mask is not None:
87-
diff = diff[mask]
113+
if q_space_mask is not None:
114+
diff = diff[q_space_mask]
88115

89-
# image self.similarity with normalized corr: cosine self.similarity?
116+
# image self.similarity with normalized cosine correlation
90117
self.similarity[rx, ry, ind] = (
91118
np.sum(diff * diff_ref)
92119
/ np.sqrt(np.sum(diff * diff))
93120
/ norm_diff_ref
94121
)
95122

96-
# self.similarity[rx, ry, ind] = np.mean(np.abs(diff - diff_ref)) / diff_ref_mean
123+
124+
self.similarity_raw = self.similarity.copy()
125+
self.similarity = self._apply_bg_mask(self.similarity)
97126

127+
if return_similarity:
128+
return self.similarity
129+
98130
# Create a function to map cluster index to color
99131
def get_color(self, cluster_index):
100132
colors = [
@@ -114,33 +146,27 @@ def get_color(self, cluster_index):
114146
# Find the pixel with the highest self.similarity and start the clustering from there
115147
def indexing_clusters_all(
116148
self,
117-
# mask,
118149
threshold,
119150
):
120-
121-
# self.dxy = np.array(
122-
# (
123-
# (-1, -1),
124-
# (-1, 0),
125-
# (-1, 1),
126-
# (0, -1),
127-
# (1, 1),
128-
# (1, 0),
129-
# (1, -1),
130-
# (0, 1),
131-
# )
132-
# )
133-
151+
"""
152+
Args:
153+
threshold : threshold for similarity to consider two pixels as part of the same cluster
154+
"""
155+
134156
sim_averaged = np.mean(self.similarity, axis=2)
135157

158+
# Assigning the background as 'counted'
159+
sim_averaged[~self.r_space_mask] = -1.0
160+
136161
# color the pixels with the cluster index
137-
# map_cluster = np.zeros((sim_averaged.shape[0],sim_averaged.shape[1]))
138162
self.cluster_map = -1 * np.ones(
139163
(sim_averaged.shape[0], sim_averaged.shape[1]), dtype=np.float64
140164
)
141165
self.cluster_map_rgb = np.zeros(
142166
(sim_averaged.shape[0], sim_averaged.shape[1], 4), dtype=np.float64
143167
)
168+
169+
self.cluster_map_rgb[..., 3] = 1.0  # start fully opaque (alpha channel = 1)
144170

145171
# store arrays of cluster_indices in a list
146172
self.cluster_list = []
@@ -156,14 +182,18 @@ def indexing_clusters_all(
156182
# finding the pixel that has the highest self.similarity among the pixel that hasn't been clustered yet
157183
# this will be the 'starting pixel' of a new cluster
158184
rx0, ry0 = np.unravel_index(sim_averaged.argmax(), sim_averaged.shape)
159-
# print(rx0, ry0)
185+
186+
# Guarding to check if the seed is background
187+
if self.r_space_mask is not None and not self.r_space_mask[rx0, ry0]:
188+
sim_averaged[rx0, ry0] = -1 # mark processed so we don't pick it again
189+
continue
190+
160191

161192
cluster_indices = np.empty((0, 2))
162193
cluster_indices = (np.append(cluster_indices, [[rx0, ry0]], axis=0)).astype(
163194
np.int32
164195
)
165196

166-
# map_cluster[rx0, ry0] = cluster_count_ind+1
167197
self.cluster_map[rx0, ry0] = cluster_count_ind
168198

169199
color = self.get_color(cluster_count_ind + 1)
@@ -182,7 +212,7 @@ def indexing_clusters_all(
182212
# counter to check if pixel in the cluster are checked for NN
183213
counting_added_pixel += 1
184214

185-
# set to -1 as its NN will be checked
215+
# set to -1 since now its NN will be checked
186216
sim_averaged[rx0, ry0] = -1
187217

188218
for ind in range(self.dxy.shape[0]):
@@ -194,12 +224,13 @@ def indexing_clusters_all(
194224
x_ind < self.similarity.shape[0] - 2 and \
195225
y_ind < self.similarity.shape[1] - 2:
196226

227+
r_ok = True if self.r_space_mask is None else bool(self.r_space_mask[x_ind, y_ind])
228+
197229
# add if the neighbor is similar, but don't add if the neighbor is already in a cluster
198230
if self.similarity[rx0, ry0, ind] >= threshold \
199-
and self.cluster_map[x_ind, y_ind] == -1:
231+
and self.cluster_map[x_ind, y_ind] == -1 and r_ok:
200232

201-
# print(cluster_indices)
202-
# print([[x_ind, y_ind]])
233+
203234
cluster_indices = np.append(
204235
cluster_indices, [[x_ind, y_ind]], axis=0
205236
)
@@ -217,9 +248,9 @@ def indexing_clusters_all(
217248
if counting_added_pixel == 0:
218249
break
219250

220-
# # single pixel cluster
251+
# single pixel cluster
221252
# if cluster_indices.shape[0] == 1:
222-
# self.cluster_map[cluster_indices[0, 0], cluster_indices[0, 1]] = [
253+
# self.cluster_map_rgb[cluster_indices[0, 0], cluster_indices[0, 1]] = [
223254
# 0,
224255
# 0,
225256
# 0,
@@ -229,7 +260,7 @@ def indexing_clusters_all(
229260
self.cluster_list.append(cluster_indices)
230261
cluster_count_ind += 1
231262

232-
# return cluster_count_ind, self.cluster_list, map_cluster, sim_averaged
263+
# return cluster_count_ind, self.cluster_list, self.cluster_map, self.cluster_map_rgb
233264

234265
def create_cluster_cube(
235266
self,

0 commit comments

Comments (0)