
Commit c0d2820

Merge branch 'master' of https://github.com/computational-cell-analytics/lightsheet-moser into intensity-masking
2 parents: 94774a0 + e1e7c48

23 files changed (+760, -98 lines)

flamingo_tools/segmentation/cochlea_mapping.py

Lines changed: 13 additions & 5 deletions
@@ -383,14 +383,15 @@ def measure_run_length_ihcs(
     return total_distance, path, path_dict
 
 
-def map_frequency(table: pd.DataFrame, cell_type: str, animal: str = "mouse") -> pd.DataFrame:
+def map_frequency(table: pd.DataFrame, animal: str = "mouse") -> pd.DataFrame:
     """Map the frequency range of SGNs in the cochlea
     using Greenwood function f(x) = A * (10 **(ax) - K).
     Values for humans: a=2.1, k=0.88, A = 165.4 [kHz].
     For mice: fit values between minimal (1kHz) and maximal (80kHz) values
 
     Args:
         table: Dataframe containing the segmentation.
+        animal: Select the Greenwood function parameters specific to a species. Either "mouse" or "gerbil".
 
     Returns:
         Dataframe containing frequency in an additional column 'frequency[kHz]'.
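
For reference, the Greenwood relation quoted in the docstring can be evaluated directly. The following is a minimal, self-contained sketch, not the repository's map_frequency implementation; the mouse and gerbil fits are not visible in this hunk, so only the human parameters from the docstring are used (with A = 165.4 the classical human fit yields frequencies in Hz).

import numpy as np

def greenwood(x, A=165.4, a=2.1, K=0.88):
    """Greenwood function f(x) = A * (10 ** (a * x) - K).

    x is the fractional distance along the cochlea (0 = apex, 1 = base in the
    classical formulation). With the human parameters above the result is in Hz,
    from roughly 20 Hz at the apex to about 20.7 kHz at the base.
    """
    return A * (10 ** (a * np.asarray(x)) - K)

print(greenwood(0.5))  # midpoint of a human cochlea, about 1.7 kHz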
@@ -569,6 +570,7 @@ def tonotopic_mapping(
     component_mapping: Optional[List[int]] = None,
     cell_type: str = "ihc",
     animal: str = "mouse",
+    apex_higher: bool = True,
 ) -> pd.DataFrame:
     """Tonotopic mapping of IHCs by supplying a table with component labels.
     The mapping assigns a tonotopic label to each IHC according to the position along the length of the cochlea.
@@ -591,19 +593,25 @@ def tonotopic_mapping(
         component_mapping = component_label
 
     if cell_type == "ihc":
-        total_distance, _, path_dict = measure_run_length_ihcs(centroids, component_label=component_label)
+        total_distance, _, path_dict = measure_run_length_ihcs(
+            centroids, component_label=component_label, apex_higher=apex_higher,
+        )
 
     else:
         if len(component_mapping) == 1:
-            total_distance, _, path_dict = measure_run_length_sgns(centroids)
+            total_distance, _, path_dict = measure_run_length_sgns(
+                centroids, apex_higher=apex_higher,
+            )
 
         else:
             centroids_components = []
             for label in component_mapping:
                 subset = table[table["component_labels"] == label]
                 subset_centroids = list(zip(subset["anchor_x"], subset["anchor_y"], subset["anchor_z"]))
                 centroids_components.append(subset_centroids)
-            total_distance, _, path_dict = measure_run_length_sgns_multi_component(centroids_components)
+            total_distance, _, path_dict = measure_run_length_sgns_multi_component(
+                centroids_components, apex_higher=apex_higher,
+            )
 
     node_dict = node_dict_from_path_dict(path_dict, label_ids, centroids)
 
@@ -622,6 +630,6 @@ def tonotopic_mapping(
 
     table.loc[:, "length[µm]"] = table["length_fraction"] * total_distance
 
-    table = map_frequency(table, cell_type=cell_type, animal=animal)
+    table = map_frequency(table, animal=animal)
 
     return table
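
A hedged usage sketch of the updated signature follows. The column names (component_labels, anchor_x/y/z, length_fraction) are taken from this diff; the position of the table argument, the remaining defaults, and the input file name are assumptions for illustration only.

import pandas as pd
from flamingo_tools.segmentation.cochlea_mapping import tonotopic_mapping

# hypothetical input: a segmentation table with anchor_x/y/z and component_labels columns
table = pd.read_csv("sgn_segmentation.tsv", sep="\t")

mapped = tonotopic_mapping(
    table,
    component_mapping=[1, 2],  # SGN branch: run-length path over two graph components
    cell_type="sgn",
    animal="mouse",
    apex_higher=True,          # new flag, forwarded to the measure_run_length_* helpers
)
print(mapped[["length[µm]", "frequency[kHz]"]].head())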

flamingo_tools/segmentation/postprocessing.py

Lines changed: 7 additions & 9 deletions
@@ -371,7 +371,7 @@ def components_sgn(
     threshold_erode: Optional[float] = None,
     min_component_length: int = 50,
     max_edge_distance: float = 30,
-    iterations_erode: Optional[int] = None,
+    iterations_erode: int = 0,
     postprocess_threshold: Optional[float] = None,
     postprocess_components: Optional[List[int]] = None,
 ) -> List[List[int]]:
@@ -383,7 +383,7 @@ def components_sgn(
         threshold_erode: Threshold of column value after erosion step with spatial statistics.
         min_component_length: Minimal length for filtering out connected components.
         max_edge_distance: Maximal distance in micrometer between points to create edges for connected components.
-        iterations_erode: Number of iterations for erosion, normally determined automatically.
+        iterations_erode: Number of iterations for erosion.
         postprocess_threshold: Post-process graph connected components by searching for points closer than threshold.
         postprocess_components: Post-process specific graph connected components ([0] for largest component only).
@@ -392,7 +392,7 @@ def components_sgn(
     """
     if keyword not in table:
         distance_avg = nearest_neighbor_distance(table, n_neighbors=100)
-        table[keyword] = list(distance_avg)
+        table.loc[:, keyword] = list(distance_avg)
 
     centroids = list(zip(table["anchor_x"], table["anchor_y"], table["anchor_z"]))
     labels = [int(i) for i in list(table["label_id"])]
@@ -401,18 +401,16 @@ def components_sgn(
     distance_nn.sort()
 
     if len(table) < 20000:
-        iterations = iterations_erode if iterations_erode is not None else 0
         min_cells = None
        average_dist = int(distance_nn[int(len(table) * 0.8)])
         threshold = threshold_erode if threshold_erode is not None else average_dist
     else:
-        iterations = iterations_erode if iterations_erode is not None else 15
         min_cells = 20000
         threshold = threshold_erode if threshold_erode is not None else 40
 
-    if iterations != 0:
+    if iterations_erode != 0:
         print(f"Using threshold of {threshold} micrometer for eroding segmentation with keyword {keyword}.")
-        new_subset = erode_subset(table.copy(), iterations=iterations,
+        new_subset = erode_subset(table.copy(), iterations=iterations_erode,
                                   threshold=threshold, min_cells=min_cells, keyword=keyword)
     else:
         new_subset = table.copy()
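
With iterations_erode defaulting to 0, erosion is now opt-in: the previous implicit choice (0 iterations for tables under 20000 rows, 15 otherwise) has to be requested explicitly. A hedged calling sketch; the table argument, its column layout, and any further required parameters not shown in these hunks are assumptions.

from flamingo_tools.segmentation.postprocessing import components_sgn

# reproduce the old large-table behaviour explicitly; the new default
# iterations_erode=0 skips the erosion step entirely
components = components_sgn(
    table,                    # segmentation table with anchor_x/y/z and label_id columns
    min_component_length=50,
    max_edge_distance=30,
    iterations_erode=15,
)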
@@ -458,7 +456,7 @@ def label_components_sgn(
     threshold_erode: Optional[float] = None,
     min_component_length: int = 50,
     max_edge_distance: float = 30,
-    iterations_erode: Optional[int] = None,
+    iterations_erode: int = 0,
     postprocess_threshold: Optional[float] = None,
     postprocess_components: Optional[List[int]] = None,
 ) -> List[int]:
@@ -470,7 +468,7 @@ def label_components_sgn(
         threshold_erode: Threshold of column value after erosion step with spatial statistics.
         min_component_length: Minimal length for filtering out connected components.
         max_edge_distance: Maximal distance in micrometer between points to create edges for connected components.
-        iterations_erode: Number of iterations for erosion, normally determined automatically.
+        iterations_erode: Number of iterations for erosion.
         postprocess_threshold: Post-process graph connected components by searching for points closer than threshold.
         postprocess_components: Post-process specific graph connected components ([0] for largest component only).
 
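
The docstring parameters above describe a proximity graph over cell centroids: edges connect points closer than max_edge_distance, and connected components shorter than min_component_length are discarded. A minimal sketch of that idea, not the repository implementation:

import networkx as nx
import numpy as np
from scipy.spatial import cKDTree

def centroid_components(centroids, max_edge_distance=30.0, min_component_length=50):
    """Connected components of a proximity graph over 3D centroids (distances in micrometer)."""
    tree = cKDTree(np.asarray(centroids, dtype=float))
    graph = nx.Graph()
    graph.add_nodes_from(range(len(centroids)))
    # edges between all pairs of centroids closer than max_edge_distance
    graph.add_edges_from(tree.query_pairs(r=max_edge_distance))
    components = [sorted(c) for c in nx.connected_components(graph)]
    # drop small components and return the remaining ones, largest first
    components = [c for c in components if len(c) >= min_component_length]
    return sorted(components, key=len, reverse=True)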

flamingo_tools/segmentation/synapse_detection.py

Lines changed: 5 additions & 1 deletion
@@ -175,14 +175,18 @@ def marker_detection(
     if os.path.exists(output_path) and prediction_key in zarr.open(output_path, "r"):
         skip_prediction = True
 
+    # skip prediction if post-processed output exists
+    detection_path = os.path.join(output_folder, "synapse_detection.tsv")
+    if os.path.exists(detection_path):
+        skip_prediction = True
+
     if not skip_prediction:
         prediction_impl(
             input_path, input_key, output_folder, model_path,
             scale=None, block_shape=block_shape, halo=halo,
             apply_postprocessing=False, output_channels=1,
         )
 
-    detection_path = os.path.join(output_folder, "synapse_detection.tsv")
     if not os.path.exists(detection_path):
         input_ = zarr.open(output_path, "r")[prediction_key]
         detections = find_local_maxima(
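
The added block makes marker_detection resumable: when the post-processed synapse_detection.tsv from an earlier run is already on disk, the network prediction is skipped as well. In isolation, the skip logic amounts to the following illustrative helper (not part of the repository):

import os
import zarr

def prediction_needed(output_folder: str, output_path: str, prediction_key: str) -> bool:
    """Mirror of the skip checks above: prediction is only needed if neither output exists."""
    # raw network prediction already stored in the output zarr?
    if os.path.exists(output_path) and prediction_key in zarr.open(output_path, "r"):
        return False
    # post-processed detection table left over from a previous run?
    if os.path.exists(os.path.join(output_folder, "synapse_detection.tsv")):
        return False
    return True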
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
[
    {
        "cochlea": "G_EK_000233_L",
        "image_channel": ["Vglut3", "IHC_v6"],
        "halo_size": [196, 196, 80],
        "crop_centers": [
            [593, 1018, 276],
            [674, 1048, 1574],
            [788, 937, 255],
            [950, 1450, 380],
            [1120, 875, 360]
        ]
    }
]
Lines changed: 50 additions & 0 deletions
@@ -0,0 +1,50 @@
[
    {
        "cochlea": "G_LR_000233_R",
        "image_channel": ["PV"],
        "halo_size": [196, 196, 196],
        "crop_centers": [
            [620, 1060, 1050],
            [1555, 1170, 1230],
            [1600, 1075, 1090],
            [1390, 1430, 1375],
            [650, 1130, 1000],
            [800, 1230, 710],
            [600, 1100, 1055]
        ]
    }
]
Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
[
    {
        "cochlea": "G_LR_000233_R",
        "image_channel": ["PV", "SGN_v2-DA"],
        "halo_size": [196, 196, 48],
        "crop_centers": [
            [1584, 1570, 958],
            [1474, 1655, 730],
            [1275, 1604, 1324],
            [1603, 1240, 1279]
        ]
    }
]
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
[
    {
        "cochlea": "LaVision-M04",
        "image_channel": ["PV"],
        "resolution": [1.887779, 1.887779, 3.0],
        "halo_size": [128, 128, 64],
        "crop_centers": [
            [2550, 2340, 440]
        ]
    }
]
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
[
    {
        "cochlea": "LaVision-M04",
        "image_channel": ["MYO", "IHC_LOWRES-v1"],
        "resolution": [1.887779, 1.887779, 3.0],
        "halo_size": [128, 128, 32],
        "crop_centers": [
            [3118, 2840, 500],
            [1540, 2075, 924],
            [2135, 835, 627],
            [2025, 1332, 221]
        ]
    }
]
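
The five new JSON files share a small crop-configuration schema: a cochlea name, one or more image channels, an optional per-axis voxel resolution, a per-axis halo_size, and a list of crop centers. How the pipeline consumes them is not part of this diff; the sketch below only illustrates turning each center plus halo into a bounding box, assuming halo_size is a per-axis half-width and using a placeholder file name.

import json

def crop_bounding_boxes(config_path):
    """Yield (cochlea, channels, bounding box) for every crop center in a config file."""
    with open(config_path) as f:
        entries = json.load(f)
    for entry in entries:
        halo = entry["halo_size"]
        for center in entry["crop_centers"]:
            # bounding box: center +/- halo along each axis, clipped at 0
            bbox = tuple(slice(max(c - h, 0), c + h) for c, h in zip(center, halo))
            yield entry["cochlea"], entry["image_channel"], bbox

for cochlea, channels, bbox in crop_bounding_boxes("crop_config.json"):  # placeholder path
    print(cochlea, channels, bbox)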
