Commit b557488
Update validation visualization
1 parent 9c4d8d3 commit b557488


2 files changed, +23 -13 lines changed


flamingo_tools/validation.py

Lines changed: 6 additions & 2 deletions
@@ -245,13 +245,17 @@ def for_visualization(segmentation, annotations, matches):
 
     point_vis = annotations.copy()
     tps = matches["tp_annotations"]
+    match_properties = ["tp" if aid in tps else "fn" for aid in range(len(annotations))]
+    # The color cycle assigns the first color to the first property etc.
+    # So we need to set the first color to red if the first id is a false negative and vice versa.
+    color_cycle = green_red[::-1] if match_properties[0] == "fn" else green_red
     point_props = dict(
         properties={
             "id": list(range(len(annotations))),
-            "match": ["tp" if aid in tps else "fn" for aid in range(len(annotations))]
+            "match": match_properties,
         },
         face_color="match",
-        face_color_cycle=green_red[::-1],
+        face_color_cycle=color_cycle,
        border_width=0.25,
        size=10,
    )
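
A minimal standalone sketch (not part of this commit) of the napari behavior the added comment describes: with a categorical property, the colors in face_color_cycle are assigned to property values in the order those values first appear, so the cycle has to be reversed whenever the first annotation is a false negative. The point data and property values below are made up for illustration.

import numpy as np
import napari

green_red = ["green", "red"]

# Three dummy annotation points; the first one is a false negative.
points = np.array([[10, 10], [20, 20], [30, 30]])
match_properties = ["fn", "tp", "tp"]

# "fn" is encountered first, so it would receive the first color of the cycle.
# Reversing the cycle keeps the intended mapping: green = tp, red = fn.
color_cycle = green_red[::-1] if match_properties[0] == "fn" else green_red

viewer = napari.Viewer()
viewer.add_points(
    points,
    properties={"match": match_properties},
    face_color="match",
    face_color_cycle=color_cycle,
    size=10,
)
napari.run()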

scripts/validation/visualize_validation.py

Lines changed: 17 additions & 11 deletions
@@ -1,28 +1,34 @@
+import argparse
 import os
 
 import imageio.v3 as imageio
 import napari
 
-from flamingo_tools.validation import fetch_data_for_evaluation, compute_matches_for_annotated_slice, for_visualization
+from flamingo_tools.validation import (
+    fetch_data_for_evaluation, compute_matches_for_annotated_slice, for_visualization, parse_annotation_path
+)
 
 # ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1Validation"
 ROOT = "annotation_data"
-TEST_ANNOTATION = os.path.join(ROOT, "AnnotationsEK/MAMD58L_PV_z771_base_full_annotationsEK.csv")
 
 
 def main():
-    image = imageio.imread(os.path.join(ROOT, "MAMD58L_PV_z771_base_full.tif"))
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--image", required=True)
+    parser.add_argument("--annotation", required=True)
+    parser.add_argument("--cache_folder")
+    args = parser.parse_args()
+    cache_folder = args.cache_folder
+
+    cochlea, slice_id = parse_annotation_path(args.annotation)
+    cache_path = None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")
+
+    image = imageio.imread(args.image)
     segmentation, annotations = fetch_data_for_evaluation(
-        TEST_ANNOTATION, cache_path="./seg.tif", components_for_postprocessing=[1],
+        args.annotation, cache_path=cache_path, components_for_postprocessing=[1],
     )
 
-    # v = napari.Viewer()
-    # v.add_image(image)
-    # v.add_labels(segmentation)
-    # v.add_points(annotations)
-    # napari.run()
-
-    matches = compute_matches_for_annotated_slice(segmentation, annotations)
+    matches = compute_matches_for_annotated_slice(segmentation, annotations, matching_tolerance=5)
     tps, fns = matches["tp_annotations"], matches["fn"]
     vis_segmentation, vis_points, seg_props, point_props = for_visualization(segmentation, annotations, matches)
 
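
With these changes the script takes its inputs from the command line instead of hard-coded paths. An invocation would look roughly like the following; the image and annotation file names are taken from the previously hard-coded defaults, and the cache folder name is a placeholder:

python scripts/validation/visualize_validation.py \
    --image annotation_data/MAMD58L_PV_z771_base_full.tif \
    --annotation annotation_data/AnnotationsEK/MAMD58L_PV_z771_base_full_annotationsEK.csv \
    --cache_folder seg_cache

If --cache_folder is omitted, cache_path stays None, presumably meaning fetch_data_for_evaluation does not cache the fetched segmentation.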
