Commit f68db1f

Update evaluation code and add script for resizing annotations
1 parent: aa1494c

5 files changed: +115 lines, -13 lines

flamingo_tools/validation.py

Lines changed: 20 additions & 7 deletions

@@ -176,13 +176,21 @@ def compute_matches_for_annotated_slice(
     segmentation_ids = np.unique(segmentation)[1:]

     # Crop to the minimal enclosing bounding box of points and segmented objects.
-    bb_seg = np.where(segmentation != 0)
-    bb_seg = tuple(slice(int(bb.min()), int(bb.max())) for bb in bb_seg)
-    bb_points = tuple(
-        slice(int(np.floor(annotations[coords].min())), int(np.ceil(annotations[coords].max())) + 1)
-        for coords in coordinates
-    )
-    bbox = tuple(slice(min(bbs.start, bbp.start), max(bbs.stop, bbp.stop)) for bbs, bbp in zip(bb_seg, bb_points))
+    seg_mask = segmentation != 0
+    if seg_mask.sum() > 0:
+        bb_seg = np.where(seg_mask)
+        bb_seg = tuple(slice(int(bb.min()), int(bb.max())) for bb in bb_seg)
+        bb_points = tuple(
+            slice(int(np.floor(annotations[coords].min())), int(np.ceil(annotations[coords].max())) + 1)
+            for coords in coordinates
+        )
+        bbox = tuple(slice(min(bbs.start, bbp.start), max(bbs.stop, bbp.stop)) for bbs, bbp in zip(bb_seg, bb_points))
+    else:
+        print("The segmentation is empty!!!")
+        bbox = tuple(
+            slice(int(np.floor(annotations[coords].min())), int(np.ceil(annotations[coords].max())) + 1)
+            for coords in coordinates
+        )
     segmentation = segmentation[bbox]

     annotations = annotations.copy()

@@ -231,6 +239,11 @@ def compute_scores_for_annotated_slice(
     return {"tp": tp, "fp": fp, "fn": fn}


+# TODO
+def create_consensus_annotations():
+    pass
+
+
 def for_visualization(segmentation, annotations, matches):
     green_red = ["#00FF00", "#FF0000"]
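
The sketch below (not part of the commit) illustrates the new cropping behavior in compute_matches_for_annotated_slice: the union of the segmentation and point bounding boxes is used when the segmentation contains objects, and the point bounding box alone is used as a fallback when the segmentation is empty. The toy segmentation, point table, and axis names are assumptions made up for this example.

import numpy as np
import pandas as pd

# Toy 2D slice with a single segmented object and two point annotations.
segmentation = np.zeros((100, 100), dtype="uint32")
segmentation[40:60, 30:50] = 1
annotations = pd.DataFrame({"axis-0": [45.2, 70.8], "axis-1": [35.1, 20.4]})
coordinates = ["axis-0", "axis-1"]

seg_mask = segmentation != 0
if seg_mask.sum() > 0:
    bb_seg = np.where(seg_mask)
    bb_seg = tuple(slice(int(bb.min()), int(bb.max())) for bb in bb_seg)
    bb_points = tuple(
        slice(int(np.floor(annotations[coords].min())), int(np.ceil(annotations[coords].max())) + 1)
        for coords in coordinates
    )
    # Union of both bounding boxes, so neither points nor objects are cropped away.
    bbox = tuple(slice(min(bbs.start, bbp.start), max(bbs.stop, bbp.stop)) for bbs, bbp in zip(bb_seg, bb_points))
else:
    # Fallback added by this commit: with an empty segmentation, crop to the points alone.
    bbox = tuple(
        slice(int(np.floor(annotations[coords].min())), int(np.ceil(annotations[coords].max())) + 1)
        for coords in coordinates
    )

print(bbox)                        # (slice(40, 72, None), slice(20, 49, None))
print(segmentation[bbox].shape)    # (32, 29)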

scripts/validation/SGNs/analyze.py

Lines changed: 6 additions & 0 deletions

@@ -24,9 +24,15 @@ def compute_scores(table, annotator=None):
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("result_file")
+    parser.add_argument("--all", action="store_true")
     args = parser.parse_args()

     table = pd.read_csv(args.result_file)
+    if args.all:
+        print(table)
+        print()
+        print()
+
     annotators = pd.unique(table.annotator)

     results = []
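
The new --all flag prints the full result table before the per-annotator summary is computed. A typical invocation might look like this (the result file name is hypothetical):

python scripts/validation/SGNs/analyze.py results.csv --all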
Lines changed: 84 additions & 0 deletions

@@ -0,0 +1,84 @@
+import os
+import shutil
+from glob import glob
+
+import numpy as np
+import pandas as pd
+import tifffile
+import zarr
+
+from flamingo_tools.s3_utils import get_s3_path, BUCKET_NAME, SERVICE_ENDPOINT
+
+
+def get_scale_factor():
+    original_path = "/mnt/ceph-hdd/cold/nim00007/cochlea-lightsheet/M_LR_000169_R/MLR000169R_PV.tif"
+    original_shape = tifffile.memmap(original_path).shape
+
+    cochlea = "M_LR_000169_R"
+    internal_path = os.path.join(cochlea, "images", "ome-zarr", "SGN_v2.ome.zarr")
+    s3_store, fs = get_s3_path(internal_path, bucket_name=BUCKET_NAME, service_endpoint=SERVICE_ENDPOINT)
+
+    input_key = "s0"
+    with zarr.open(s3_store, mode="r") as f:
+        new_shape = f[input_key].shape
+
+    scale_factor = tuple(
+        float(nsh) / float(osh) for nsh, osh in zip(new_shape, original_shape)
+    )
+    return scale_factor
+
+
+def rescale_annotations(input_path, scale_factor, bkp_folder):
+    annotations = pd.read_csv(input_path)
+
+    annotations_rescaled = annotations.copy()
+    annotations_rescaled["axis-1"] = annotations["axis-1"] * scale_factor[1]
+    annotations_rescaled["axis-2"] = annotations["axis-2"] * scale_factor[2]
+
+    fname = os.path.basename(input_path)
+    name_components = fname.split("_")
+    z = int(name_components[2][1:])
+    new_z = int(np.round(z * scale_factor[0]))
+
+    name_components[2] = f"z{new_z}"
+    name_components = name_components[:-1] + ["rescaled"] + name_components[-1:]
+    new_fname = "_".join(name_components)
+
+    input_folder = os.path.split(input_path)[0]
+    out_path = os.path.join(input_folder, new_fname)
+    bkp_path = os.path.join(bkp_folder, fname)
+
+    # print(input_path)
+    # print(out_path)
+    # print(bkp_path)
+    # print()
+    # return
+
+    shutil.move(input_path, bkp_path)
+    annotations_rescaled.to_csv(out_path, index=False)
+
+
+def main():
+    # scale_factor = get_scale_factor()
+    # print(scale_factor)
+    scale_factor = (2.6314,) * 3
+
+    root = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationSGNs"
+    annotation_folders = ["AnnotationsEK", "AnnotationsAMD", "AnnotationsLR"]
+    for folder in annotation_folders:
+        bkp_folder = os.path.join(root, folder, "rescaled_bkp")
+        os.makedirs(bkp_folder, exist_ok=True)
+
+        files = glob(os.path.join(root, folder, "*.csv"))
+        for annotation_file in files:
+            fname = os.path.basename(annotation_file)
+            if not fname.startswith(("MLR169R_PV_z722", "MLR169R_PV_z979")):
+                continue
+            print("Rescaling", annotation_file)
+            rescale_annotations(annotation_file, scale_factor, bkp_folder)
+
+
+# Rescale the point annotations for the cochlea MLR169R, which was
+# annotated at the original scale, but then rescaled for segmentation.
+if __name__ == "__main__":
+    main()
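
As a worked example of the renaming done by rescale_annotations (the full file name below is hypothetical; only the MLR169R_PV_z722 prefix appears in the script): with the hard-coded factor 2.6314 the z-slice becomes round(722 * 2.6314) = 1900, the "axis-1" and "axis-2" columns are scaled by the same factor, the rescaled table is written next to the input, and the original is moved into rescaled_bkp.

import numpy as np

scale_factor = (2.6314,) * 3
fname = "MLR169R_PV_z722_annotations.csv"  # hypothetical name following the expected pattern
name_components = fname.split("_")
new_z = int(np.round(int(name_components[2][1:]) * scale_factor[0]))  # 722 -> 1900
name_components[2] = f"z{new_z}"
name_components = name_components[:-1] + ["rescaled"] + name_components[-1:]
print("_".join(name_components))  # MLR169R_PV_z1900_rescaled_annotations.csv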

scripts/validation/SGNs/run_evaluation.py

Lines changed: 1 addition & 5 deletions

@@ -7,7 +7,7 @@
 )

 ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationSGNs"
-ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD", "AnnotationLR"]
+ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD", "AnnotationsLR"]


 def run_evaluation(root, annotation_folders, result_file, cache_folder):

@@ -27,11 +27,7 @@ def run_evaluation(root, annotation_folders, result_file, cache_folder):
         annotator = folder[len("Annotations"):]
         annotations = sorted(glob(os.path.join(root, folder, "*.csv")))
         for annotation_path in annotations:
-            print(annotation_path)
             cochlea, slice_id = parse_annotation_path(annotation_path)
-            # We don't have this cochlea in MoBIE yet
-            if cochlea == "M_LR_000169_R":
-                continue

             print("Run evaluation for", annotator, cochlea, "z=", slice_id)
             segmentation, annotations = fetch_data_for_evaluation(

scripts/validation/SGNs/visualize_validation.py

Lines changed: 4 additions & 1 deletion

@@ -30,8 +30,11 @@ def visualize_anotation(annotation_path, cache_folder):

     image_path = _match_image_path(annotation_path)

+    # For debugging.
+    components = [1]
+    # components = None
     segmentation, annotations = fetch_data_for_evaluation(
-        annotation_path, cache_path=cache_path, components_for_postprocessing=[1],
+        annotation_path, cache_path=cache_path, components_for_postprocessing=components,
     )

     image = tifffile.memmap(image_path)
