
Commit 3b63dce

Update SGN evaluation
1 parent 1eda4e6 commit 3b63dce

File tree: 9 files changed, +110, -71 lines

flamingo_tools/validation.py

Lines changed: 1 addition & 3 deletions
@@ -35,12 +35,10 @@ def parse_annotation_path(annotation_path):
     return cochlea, slice_id


-# TODO enable table component filtering with MoBIE table
-# NOTE: the main component is always #1
 def fetch_data_for_evaluation(
     annotation_path: str,
     cache_path: Optional[str] = None,
-    seg_name: str = "SGN",
+    seg_name: str = "SGN_v2",
     z_extent: int = 0,
     components_for_postprocessing: Optional[List[int]] = None,
 ) -> Tuple[np.ndarray, pd.DataFrame]:
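
A minimal usage sketch of the changed default (not part of the commit; the annotation path below is a placeholder):

    from flamingo_tools.validation import fetch_data_for_evaluation

    # Picks up the new default segmentation, "SGN_v2".
    segmentation, annotations = fetch_data_for_evaluation("MyCochlea_z100_annotations.csv")

    # Evaluating against the previous segmentation now requires passing it explicitly.
    segmentation, annotations = fetch_data_for_evaluation("MyCochlea_z100_annotations.csv", seg_name="SGN")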

scripts/sgn_stain_predictions/check_segmentation.py

Lines changed: 13 additions & 8 deletions
@@ -6,7 +6,8 @@
 
 
 ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops"
-SAVE_ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations"
+SAVE_ROOT1 = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations"  # noqa
+SAVE_ROOT2 = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations_v2"  # noqa
 
 
 def main():
@@ -16,18 +17,22 @@ def main():
         return
     print("Visualizing", ff)
     rel_path = os.path.relpath(ff, ROOT)
-    seg_path = os.path.join(SAVE_ROOT, rel_path)
+    seg_path1 = os.path.join(SAVE_ROOT1, rel_path)
+    seg_path2 = os.path.join(SAVE_ROOT2, rel_path)
 
+    print("Load raw")
     image = imageio.imread(ff)
-    if os.path.exists(seg_path):
-        seg = imageio.imread(seg_path)
-    else:
-        seg = None
+    print("Load segmentation 1")
+    seg1 = imageio.imread(seg_path1) if os.path.exists(seg_path1) else None
+    print("Load segmentation 2")
+    seg2 = imageio.imread(seg_path2) if os.path.exists(seg_path2) else None
 
     v = napari.Viewer()
     v.add_image(image)
-    if seg is not None:
-        v.add_labels(seg)
+    if seg1 is not None:
+        v.add_labels(seg1, name="original")
+    if seg2 is not None:
+        v.add_labels(seg2, name="adapted")
     napari.run()


scripts/sgn_stain_predictions/measure_intensities.py

Lines changed: 2 additions & 2 deletions
@@ -6,12 +6,12 @@
 
 
 ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops"
-SAVE_ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations"
+SAVE_ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations_v2"  # noqa
 
 
 def measure_intensities(ff):
     rel_path = os.path.relpath(ff, ROOT)
-    out_path = os.path.join("./measurements", rel_path.replace(".tif", ".xlsx"))
+    out_path = os.path.join("./measurements_v2", rel_path.replace(".tif", ".xlsx"))
     if os.path.exists(out_path):
         return

scripts/sgn_stain_predictions/run_prediction.py

Lines changed: 3 additions & 2 deletions
@@ -7,9 +7,10 @@
 from flamingo_tools.segmentation import run_unet_prediction
 
 ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops"
-MODEL_PATH = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/SGN/cochlea_distance_unet_SGN_March2025Model"  # noqa
+# MODEL_PATH = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/SGN/cochlea_distance_unet_SGN_March2025Model"  # noqa
+MODEL_PATH = "/mnt/vast-nhr/home/pape41/u12086/Work/my_projects/flamingo-tools/scripts/training/sgn_model.pt"  # noqa
 
-SAVE_ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations"
+SAVE_ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/LS_sampleprepcomparison_crops/segmentations_v2"  # noqa
 
 
 def check_data():

scripts/validation/analyze.py renamed to scripts/validation/SGNs/analyze.py

Lines changed: 7 additions & 1 deletion
@@ -1,8 +1,14 @@
+import argparse
 import pandas as pd
 
 # TODO more logic to separate by annotator etc.
 # For now this is just a simple script for global eval
-table = pd.read_csv("./results.csv")
+
+parser = argparse.ArgumentParser()
+parser.add_argument("file")
+args = parser.parse_args()
+
+table = pd.read_csv(args.file)
 print("Table:")
 print(table)
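
With this change analyze.py no longer reads a hard-coded ./results.csv; the results table is passed as a positional argument instead, e.g. "python analyze.py results.csv" (the file name here is just an example).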

scripts/validation/check_annotations.py renamed to scripts/validation/SGNs/compare_annotations.py

Lines changed: 1 addition & 2 deletions
@@ -4,8 +4,7 @@
 import napari
 import pandas as pd
 
-# ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1Validation"
-ROOT = "annotation_data"
+ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1Validation"
 TEST_ANNOTATION = os.path.join(ROOT, "AnnotationsEK/MAMD58L_PV_z771_base_full_annotationsEK.csv")


scripts/validation/run_evaluation.py renamed to scripts/validation/SGNs/run_evaluation.py

Lines changed: 4 additions & 3 deletions
@@ -6,8 +6,8 @@
     fetch_data_for_evaluation, parse_annotation_path, compute_scores_for_annotated_slice
 )
 
-ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1Validation"
-ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD"]
+ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationSGNs"
+ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD", "AnnotationLR"]
 
 
 def run_evaluation(root, annotation_folders, result_file, cache_folder):
@@ -27,12 +27,13 @@ def run_evaluation(root, annotation_folders, result_file, cache_folder):
         annotator = folder[len("Annotations"):]
         annotations = sorted(glob(os.path.join(root, folder, "*.csv")))
         for annotation_path in annotations:
+            print(annotation_path)
             cochlea, slice_id = parse_annotation_path(annotation_path)
             # We don't have this cochlea in MoBIE yet
             if cochlea == "M_LR_000169_R":
                 continue
 
-            print("Run evaluation for", annotator, cochlea, slice_id)
+            print("Run evaluation for", annotator, cochlea, "z=", slice_id)
             segmentation, annotations = fetch_data_for_evaluation(
                 annotation_path, components_for_postprocessing=[1],
                 cache_path=None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")
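
For reference, the annotator tag used in the print statement above is derived by stripping the "Annotations" prefix from the folder name. A quick worked check of what this yields for the folders listed in this diff:

    >>> "AnnotationsEK"[len("Annotations"):]
    'EK'
    >>> "AnnotationsAMD"[len("Annotations"):]
    'AMD'
    >>> "AnnotationLR"[len("Annotations"):]
    'R'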

Lines changed: 79 additions & 0 deletions (new file)

import argparse
import os
from glob import glob

import napari
import tifffile

from flamingo_tools.validation import (
    fetch_data_for_evaluation, compute_matches_for_annotated_slice, for_visualization, parse_annotation_path
)

ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationSGNs"


def _match_image_path(annotation_path):
    # The image crop belonging to an annotation is found by matching the file name prefix.
    all_files = glob(os.path.join(ROOT, "*.tif"))
    prefix = os.path.basename(annotation_path).split("_")[:-3]
    prefix = "_".join(prefix)
    matches = [path for path in all_files if os.path.basename(path).startswith(prefix)]
    assert len(matches) == 1, f"{prefix}: {len(matches)}"
    return matches[0]


def visualize_annotation(annotation_path, cache_folder):
    print("Checking", annotation_path)
    cochlea, slice_id = parse_annotation_path(annotation_path)
    cache_path = None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")

    image_path = _match_image_path(annotation_path)
    segmentation, annotations = fetch_data_for_evaluation(
        annotation_path, cache_path=cache_path, components_for_postprocessing=[1],
    )

    image = tifffile.memmap(image_path)
    if segmentation.ndim == 2:
        image = image[image.shape[0] // 2]
    assert image.shape == segmentation.shape, f"{image.shape}, {segmentation.shape}"

    matches = compute_matches_for_annotated_slice(segmentation, annotations, matching_tolerance=5)
    vis_segmentation, vis_points, seg_props, point_props = for_visualization(segmentation, annotations, matches)

    # tps, fns = matches["tp_annotations"], matches["fn"]
    # print("True positive annotations:")
    # print(tps)
    # print("False negative annotations:")
    # print(fns)

    v = napari.Viewer()
    v.add_image(image)
    v.add_labels(vis_segmentation, **seg_props)
    v.add_points(vis_points, **point_props)
    v.add_labels(segmentation, visible=False)
    v.add_points(annotations, visible=False)
    v.title = os.path.relpath(annotation_path, ROOT)
    napari.run()


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--annotations", nargs="+")
    parser.add_argument("--cache_folder")
    args = parser.parse_args()
    cache_folder = args.cache_folder

    if args.annotations is None:
        annotation_paths = sorted(glob(os.path.join(ROOT, "**", "*.csv"), recursive=True))
    else:
        annotation_paths = args.annotations

    for annotation_path in annotation_paths:
        visualize_annotation(annotation_path, cache_folder)


if __name__ == "__main__":
    main()
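
Assuming the new script is run directly (its path is not shown in this view), calling it without arguments visualizes every *.csv annotation found recursively under ROOT, while --annotations restricts the run to specific annotation files and --cache_folder caches the fetched segmentation slices as .tif files between runs.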

scripts/validation/visualize_validation.py

Lines changed: 0 additions & 50 deletions
This file was deleted.
