
Commit aa1494c

Implement mapping of synapses to IHC and create draft for marker detection workflow
1 parent c3e2766 commit aa1494c

2 files changed: +134, -0 lines changed
Lines changed: 77 additions & 0 deletions
@@ -0,0 +1,77 @@
from typing import Optional

import numpy as np
import pandas as pd

from elf.parallel.distance_transform import map_points_to_objects


def map_and_filter_detections(
    segmentation: np.ndarray,
    detections: pd.DataFrame,
    max_distance: float,
    resolution: float = 0.38,
    n_threads: Optional[int] = None,
    verbose: bool = True,
) -> pd.DataFrame:
    """Map synapse detections to segmented IHCs and filter out detections that exceed a distance threshold to the IHCs.

    Args:
        segmentation: The IHC segmentation.
        detections: The synapse marker detections.
        max_distance: The maximal distance for a valid match of synapse markers to IHCs.
        resolution: The resolution / voxel size of the data in micrometer.
        n_threads: The number of threads for parallelizing the mapping of detections to objects.
        verbose: Whether to print the progress of the mapping procedure.

    Returns:
        The detections with matched IHC ids and distances, filtered by the maximal distance.
    """
    # Get the point coordinates.
    points = detections[["z", "y", "x"]].values.astype("int")

    # Set the block shape (this could also be exposed as a parameter; it should not matter much though).
    block_shape = (64, 256, 256)

    # Determine the halo. We set it to 2 pixels + the max distance in pixels, to ensure that all distances
    # smaller than the max distance are measured.
    halo = (2 + int(np.ceil(max_distance / resolution)),) * 3
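    # For example, with max_distance=2.0 and the default resolution of 0.38 micrometer per voxel,
    # this yields a halo of 2 + ceil(2.0 / 0.38) = 2 + 6 = 8 voxels along each axis.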

    # Map the detections to the objects in the (IHC) segmentation.
    object_ids, object_distances = map_points_to_objects(
        segmentation=segmentation,
        points=points,
        block_shape=block_shape,
        halo=halo,
        sampling=resolution,
        n_threads=n_threads,
        verbose=verbose,
    )
    assert len(object_ids) == len(points)
    assert len(object_distances) == len(points)

    # Add the matched ids and distances to the dataframe.
    detections["matched_ihc"] = object_ids
    detections["distance_to_ihc"] = object_distances

    # Filter the dataframe by the max distance.
    detections = detections[detections.distance_to_ihc < max_distance]
    return detections


# TODO implement streamlined workflow for the marker detection, mapping and filtering.
def marker_detection():
    """Streamlined workflow for synapse marker detection, mapping to IHCs and distance-based filtering.
    """
    # 1.) Determine the mask for inference based on the IHC segmentation.
    # Best approach: load the IHC segmentation at a low scale level, binarize it,
    # dilate it and use this as the mask. It can be mapped back to the full resolution
    # with `elf.wrapper.ResizedVolume` (see the sketch below).

    # 2.) Run inference and detection of maxima.
    # This can be taken from 'scripts/synapse_marker_detection/run_prediction.py'
    # (and the run prediction script should then be refactored).

    # 3.) Map the detections to IHCs and filter them based on a distance criterion.
    # Use the function 'map_and_filter_detections' from above.

    # 4.) Add the filtered detections to MoBIE.
    # IMPORTANT: scale the coordinates with the resolution here.
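
The following is a possible sketch for step 1.) of the draft workflow above. It assumes that the low-scale IHC segmentation has already been loaded as a numpy array and that `elf.wrapper.ResizedVolume` takes the input volume, the target shape and the interpolation order, as suggested by the comment; the helper name `compute_inference_mask` and its parameters are placeholders and not part of this commit.

from scipy.ndimage import binary_dilation
from elf.wrapper import ResizedVolume


def compute_inference_mask(seg_low_scale, full_shape, dilation_iterations=4):
    """Binarize and dilate a low-scale IHC segmentation and map it back to the full resolution."""
    # Binarize the segmentation and grow it by a few voxels so that synapses close to IHCs are kept.
    mask = binary_dilation(seg_low_scale > 0, iterations=dilation_iterations)
    # Nearest-neighbor interpolation (order=0) keeps the mask binary after resizing.
    return ResizedVolume(mask.astype("uint8"), shape=full_shape, order=0)

For step 4.), the draft notes that the coordinates have to be scaled with the resolution before the detections are added to MoBIE. A minimal sketch of that scaling, using the default voxel size of 0.38 micrometer from `map_and_filter_detections`; the helper name is again a placeholder and the actual registration with MoBIE is not shown.

def scale_detections_to_physical_units(detections, resolution=0.38):
    """Convert detection coordinates from voxels to physical units (micrometer)."""
    scaled = detections.copy()
    scaled[["z", "y", "x"]] = scaled[["z", "y", "x"]] * resolution
    return scaled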
Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
import os

import imageio.v3 as imageio
import pandas as pd

from flamingo_tools.segmentation.marker_detection import map_and_filter_detections


ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/croppings/Synapse_crop"
VGLUT_PATH = os.path.join(ROOT, "M_LR_000226_R_crop_1098-0926-0872_Vglut3.tif")
CTBP2_PATH = os.path.join(ROOT, "M_LR_000226_R_crop_1098-0926-0872_CTBP2.tif")
SEG_PATH = os.path.join(ROOT, "M_LR_000226_R_resized_crop_1098-0926-0872_IHC.tif")
DET_PATH = os.path.join(ROOT, "synapses/synapse_detection.tsv")


def check_data():
    """Visually inspect the raw and filtered detections together with the image data and IHC segmentation in napari."""
    import napari

    detections = pd.read_csv(DET_PATH, sep="\t")
    detections = detections[["z", "y", "x"]].values

    filtered_path = os.path.join(ROOT, "synapses/synapse_detection_filtered.tsv")
    filtered_detections = pd.read_csv(filtered_path, sep="\t")
    filtered_detections = filtered_detections[["z", "y", "x"]].values

    vglut = imageio.imread(VGLUT_PATH)
    ctbp2 = imageio.imread(CTBP2_PATH)
    ihcs = imageio.imread(SEG_PATH)

    v = napari.Viewer()
    v.add_image(vglut)
    v.add_image(ctbp2)
    v.add_labels(ihcs)
    v.add_points(detections)
    v.add_points(filtered_detections)
    napari.run()


def map_synapses():
    """Map the synapse detections to the IHC segmentation and save the filtered detections."""
    ihcs = imageio.imread(SEG_PATH)
    detections = pd.read_csv(DET_PATH, sep="\t")
    n_detections = len(detections)

    detections = map_and_filter_detections(ihcs, detections, max_distance=2.0, n_threads=8)
    print("Detections after mapping and filtering:", len(detections), "/", n_detections)

    out_path = os.path.join(ROOT, "synapses/synapse_detection_filtered.tsv")
    detections.to_csv(out_path, sep="\t", index=False)


def main():
    map_synapses()
    # check_data()


if __name__ == "__main__":
    main()
