Commit c795e0f

Generic names
1 parent 32abdc1 commit c795e0f

File tree

1 file changed: +68 -54 lines changed


scripts/intensity_annotation/gfp_annotation.py

Lines changed: 68 additions & 54 deletions
@@ -58,7 +58,7 @@ def update_hist(self):
         # Seaborn version (nicer aesthetics)
         sns.histplot(data, bins=self.bins, ax=self.ax, kde=False)

-        self.ax.set_xlabel(f"{stat_name} GFP Intensity")
+        self.ax.set_xlabel(f"{stat_name} Marker Intensity")
         self.ax.set_ylabel("Count")
         self.canvas.draw_idle()

@@ -69,64 +69,64 @@ def _create_stat_widget(statistics, default_stat):


 # Extend via watershed, this could work for a better alignment.
-def _extend_sgns_complex(gfp, sgns):
+def _extend_seg_complex(stain, seg):
     # 1.) compute distance to the SGNs to create the background seed.
     print("Compute edt")

     # Could use parallel impl
     distance_threshol = 7
-    distances = distance_transform(sgns == 0)
+    distances = distance_transform(seg == 0)

     # Erode seeds?
-    seeds = sgns.copy()
+    seeds = seg.copy()
     bg_seed_id = int(seeds.max()) + 1
     seeds[distances > distance_threshol] = bg_seed_id

     # Dilate to cover everything on the boundary?
     print("Run watershed")
-    sgns_extended = seeded_watershed(gfp, markers=seeds)
-    sgns_extended[sgns_extended == bg_seed_id] = 0
+    seg_extended = seeded_watershed(stain, markers=seeds)
+    seg_extended[seg_extended == bg_seed_id] = 0

     v = napari.Viewer()
-    v.add_image(gfp)
+    v.add_image(stain)
     v.add_image(distances)
     v.add_labels(seeds)
-    v.add_labels(sgns_extended)
+    v.add_labels(seg_extended)
     napari.run()


 # Just dilate by 3 pixels.
-def _extend_sgns_simple(gfp, sgns, dilation):
+def _extend_seg_simple(stain, seg, dilation):
     block_shape = (128,) * 3
     halo = (dilation + 2,) * 3

-    distances = distance_transform(sgns == 0, block_shape=block_shape, halo=halo, n_threads=8)
+    distances = distance_transform(seg == 0, block_shape=block_shape, halo=halo, n_threads=8)
     mask = distances < dilation

-    sgns_extended = np.zeros_like(sgns)
-    sgns_extended = seeded_watershed(
-        distances, sgns, sgns_extended, block_shape=block_shape, halo=halo, n_threads=8, mask=mask
+    seg_extended = np.zeros_like(seg)
+    seg_extended = seeded_watershed(
+        distances, seg, seg_extended, block_shape=block_shape, halo=halo, n_threads=8, mask=mask
     )

-    return sgns_extended
+    return seg_extended


-def _create_mask(sgns_extended, gfp):
+def _create_mask(seg_extended, stain):
     from skimage.transform import downscale_local_mean, resize

-    gfp_averaged = downscale_local_mean(gfp, factors=(16, 16, 16))
+    stain_averaged = downscale_local_mean(stain, factors=(16, 16, 16))
     # The 35th percentile seems to be a decent approximation for the background subtraction.
-    threshold = np.percentile(gfp_averaged, 35)
-    mask = gfp_averaged > threshold
-    mask = resize(mask, sgns_extended.shape, order=0, anti_aliasing=False, preserve_range=True).astype(bool)
-    mask[sgns_extended != 0] = 0
+    threshold = np.percentile(stain_averaged, 35)
+    mask = stain_averaged > threshold
+    mask = resize(mask, seg_extended.shape, order=0, anti_aliasing=False, preserve_range=True).astype(bool)
+    mask[seg_extended != 0] = 0

     # v = napari.Viewer()
-    # v.add_image(gfp)
-    # v.add_image(gfp_averaged, scale=(16, 16, 16))
+    # v.add_image(stain)
+    # v.add_image(stain_averaged, scale=(16, 16, 16))
     # v.add_labels(mask)
     # # v.add_labels(mask, scale=(16, 16, 16))
-    # v.add_labels(sgns_extended)
+    # v.add_labels(seg_extended)
     # napari.run()

     return mask
@@ -138,55 +138,65 @@ def gfp_annotation(prefix, default_stat="median", background_norm=None, is_otof=
     direc = os.path.dirname(os.path.abspath(prefix))
     basename = os.path.basename(prefix)
     file_names = [entry.name for entry in os.scandir(direc)]
-    # Keeping the names "gfp", "sgn", and "pv" is a bit hacky, maybe we should change this to more geeric names.
     if is_otof:  # OTOF cochlea with VGlut3, Alphatag and IHC segmentation.
-        gfp_file = [name for name in file_names if basename in name and "GFP" in name][0]
-        sgn_file = [name for name in file_names if basename in name and "IHC" in name][0]
-        pv_file = [name for name in file_names if basename in name and "lut3" in name][0]
+        stain1_file = [name for name in file_names if basename in name and "pha" in name][0]
+        stain2_file = [name for name in file_names if basename in name and "lut3" in name][0]
+        seg_file = [name for name in file_names if basename in name and "IHC" in name][0]
+
+        stain1_name = "Alphatag"
+        stain2_name = "Vglut3"
+        seg_name = "IHC"
+
     else:  # ChReef cochlea with PV, GFPand SGN segmentation
-        gfp_file = [name for name in file_names if basename in name and "GFP" in name][0]
-        sgn_file = [name for name in file_names if basename in name and "SGN" in name][0]
-        pv_file = [name for name in file_names if basename in name and "PV" in name][0]
+        stain1_file = [name for name in file_names if basename in name and "GFP" in name][0]
+        stain2_file = [name for name in file_names if basename in name and "PV" in name][0]
+        seg_file = [name for name in file_names if basename in name and "SGN" in name][0]

-    gfp = imageio.imread(os.path.join(direc, gfp_file))
-    sgns = imageio.imread(os.path.join(direc, sgn_file))
-    pv = imageio.imread(os.path.join(direc, pv_file))
+        stain1_name = "GFP"
+        stain2_name = "PV"
+        seg_name = "SGN"
+
+    stain1 = imageio.imread(os.path.join(direc, stain1_file))
+    stain2 = imageio.imread(os.path.join(direc, stain2_file))
+    seg = imageio.imread(os.path.join(direc, seg_file))

     # bb = np.s_[128:-128, 128:-128, 128:-128]
     # gfp, sgns, pv = gfp[bb], sgns[bb], pv[bb]
     # print(gfp.shape)

     # Extend the sgns so that they cover the SGN boundaries.
-    # sgns_extended = _extend_sgns(gfp, sgns)
+    # sgns_extended = _extend_seg(gfp, sgns)
     # TODO we need to integrate this directly in the object measurement to efficiently do it at scale.
-    sgns_extended = _extend_sgns_simple(gfp, sgns, dilation=4)
+    seg_extended = _extend_seg_simple(stain1, seg, dilation=4)
+    if is_otof:
+        seg_extended = seg.copy()

     # Compute the intensity statistics.
     if background_norm is None:
         mask = None
         feature_set = "default"
     else:
-        mask = _create_mask(sgns_extended, gfp)
-        assert mask.shape == sgns_extended.shape
+        mask = _create_mask(seg_extended, stain1)
+        assert mask.shape == seg_extended.shape
         feature_set = "default_background_norm" if background_norm == "division" else "default_background_subtract"
     statistics = compute_object_measures_impl(
-        gfp, sgns_extended, feature_set=feature_set, foreground_mask=mask, median_only=True
+        stain1, seg_extended, feature_set=feature_set, foreground_mask=mask, median_only=True
     )

     # Open the napari viewer.
     v = napari.Viewer()

     # Add the base layers.
-    v.add_image(gfp, name="GFP")
-    v.add_image(pv, visible=False, name="PV")
-    v.add_labels(sgns, visible=False, name="SGNs")
-    v.add_labels(sgns_extended, name="SGNs-extended")
+    v.add_image(stain1, name=stain1_name)
+    v.add_image(stain2, visible=False, name=stain2_name)
+    v.add_labels(seg, visible=False, name=f"{seg_name}s")
+    v.add_labels(seg_extended, name=f"{seg_name}s-extended")
     if mask is not None:
         v.add_labels(mask, name="mask-for-background", visible=False)

     # Add additional layers for intensity coloring and classification
     # data_numerical = np.zeros(gfp.shape, dtype="float32")
-    data_labels = np.zeros(gfp.shape, dtype="uint8")
+    data_labels = np.zeros(stain1.shape, dtype="uint8")

     # v.add_image(data_numerical, name="gfp-intensity")
     v.add_labels(data_labels, name="positive-negative")
@@ -210,7 +220,7 @@ def gfp_annotation(prefix, default_stat="median", background_norm=None, is_otof=
         call_button="Pick Value"
     )
     def pick_widget(viewer: napari.Viewer, value: float = 0.0):
-        layer = viewer.layers["SGNs-extended"]
+        layer = viewer.layers[f"{seg_name}s-extended"]
         selected_id = layer.selected_label

         stat_name = stat_widget.param_box.currentText()
@@ -239,9 +249,9 @@ def threshold_widget(viewer: napari.Viewer, threshold: float = (max_val - min_va
         vals = statistics[stat_name].values
         pos_ids = label_ids[vals >= threshold]
         neg_ids = label_ids[vals <= threshold]
-        data_labels = np.zeros(gfp.shape, dtype="uint8")
-        data_labels[np.isin(sgns_extended, pos_ids)] = 2
-        data_labels[np.isin(sgns_extended, neg_ids)] = 1
+        data_labels = np.zeros(stain1.shape, dtype="uint8")
+        data_labels[np.isin(seg_extended, pos_ids)] = 2
+        data_labels[np.isin(seg_extended, neg_ids)] = 1
         viewer.layers["positive-negative"].data = data_labels

     threshold_widget.viewer.value = v
@@ -250,7 +260,7 @@ def threshold_widget(viewer: napari.Viewer, threshold: float = (max_val - min_va
     v.window.add_dock_widget(stat_widget, area="right")
     v.window.add_dock_widget(pick_widget, area="right")
     v.window.add_dock_widget(threshold_widget, area="right")
-    stat_widget.setWindowTitle("GFP Histogram")
+    stat_widget.setWindowTitle(f"{stain1_name} Histogram")

     napari.run()

@@ -260,13 +270,17 @@ def threshold_widget(viewer: napari.Viewer, threshold: float = (max_val - min_va
 # - M_LR_000145_L: rough alignment is ok, detailed alignment also ok.
 # - M_LR_000151_R: rough alignment is ok, detailed alignment also ok.
 def main():
-    parser = argparse.ArgumentParser(description="Start a GUI for determining an intensity threshold for positive / negative transduction in segmented cells.")
+    parser = argparse.ArgumentParser(
+        description="Start a GUI for determining an intensity threshold for positive "
+        "/ negative transduction in segmented cells.")
     parser.add_argument("prefix", help="The prefix of the files to open with the annotation tool.")
-    parser.add_argument(
-        "-b", "--background_norm", help="How to normalize the intensity values for background intensity."
-        "Valid options are 'division' and 'subtraction'. If nothing is passed then the intensity values are not normalized."
-    )
-    parser.add_argument("--otof", action="store_true", help="Whether to run the annotation tool for otof samples with VGlut3, Alphatag and IHC segmentation.")  # noqa
+    parser.add_argument("-b", "--background_norm",
+                        help="How to normalize the intensity values for background intensity."
+                        "Valid options are 'division' and 'subtraction'."
+                        "If nothing is passed then the intensity values are not normalized.")
+    parser.add_argument("--otof", action="store_true",
+                        help="Whether to run the annotation tool for otof samples with VGlut3, "
+                        "Alphatag and IHC segmentation.")  # noqa
     args = parser.parse_args()

     gfp_annotation(args.prefix, background_norm=args.background_norm, is_otof=args.otof)
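
For context, a minimal sketch of how the reworked entry point might be driven after this commit. The data prefixes below are placeholders (not values from the commit), and the direct import assumes scripts/intensity_annotation is on the Python path; the equivalent CLI calls would be `python scripts/intensity_annotation/gfp_annotation.py <prefix> -b subtraction` and `... --otof`.

    # Hypothetical usage sketch -- prefixes are placeholders, not part of this commit.
    from gfp_annotation import gfp_annotation  # assumes the script directory is importable

    # ChReef sample: GFP / PV stains with an SGN segmentation, background-subtracted intensities.
    gfp_annotation("/data/cochlea/M_LR_000145_L", background_norm="subtraction")

    # OTOF sample: Alphatag / VGlut3 stains with an IHC segmentation (same as passing --otof).
    gfp_annotation("/data/cochlea/M_OTOF_sample_R", background_norm=None, is_otof=True)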
