Skip to content

Commit 65f1314

Browse files
Update evaluation
1 parent 3b63dce commit 65f1314

File tree

7 files changed

+267
-27
lines changed

7 files changed

+267
-27
lines changed

scripts/validation/.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
cache/
2+
results/
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import os
2+
from glob import glob
3+
4+
import pandas as pd
5+
from flamingo_tools.validation import (
6+
fetch_data_for_evaluation, parse_annotation_path, compute_scores_for_annotated_slice
7+
)
8+
9+
ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationIHCs"
10+
ANNOTATION_FOLDERS = ["Annotations_LR"]
11+
12+
13+
def run_evaluation(root, annotation_folders, result_file, cache_folder):
    """Evaluate IHC segmentation against manual per-slice annotations.

    Args:
        root: Folder that contains the annotation sub-folders.
        annotation_folders: Folder names with per-annotator annotation CSVs.
            Names are expected to start with 'Annotations'; the remainder of
            the name is used as the annotator id.
        result_file: Path to the CSV file for storing the evaluation results.
        cache_folder: Optional folder for caching fetched segmentation slices.
    """
    results = {
        "annotator": [],
        "cochlea": [],
        "slice": [],
        "tps": [],
        "fps": [],
        "fns": [],
    }

    if cache_folder is not None:
        os.makedirs(cache_folder, exist_ok=True)

    for folder in annotation_folders:
        annotator = folder[len("Annotations"):]
        # NOTE: use a name distinct from the per-slice 'annotations' returned by
        # fetch_data_for_evaluation below; the original code reassigned the loop
        # iterable inside the loop body.
        annotation_paths = sorted(glob(os.path.join(root, folder, "*.csv")))
        for annotation_path in annotation_paths:
            print(annotation_path)
            cochlea, slice_id = parse_annotation_path(annotation_path)

            # For the cochlea M_LR_000226_R the actual component is 2, not 1
            component = 2 if "226_R" in cochlea else 1
            print("Run evaluation for", annotator, cochlea, "z=", slice_id)
            segmentation, annotations = fetch_data_for_evaluation(
                annotation_path, components_for_postprocessing=[component],
                seg_name="IHC_v2",
                cache_path=None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")
            )
            scores = compute_scores_for_annotated_slice(segmentation, annotations, matching_tolerance=5)
            results["annotator"].append(annotator)
            results["cochlea"].append(cochlea)
            results["slice"].append(slice_id)
            results["tps"].append(scores["tp"])
            results["fps"].append(scores["fp"])
            results["fns"].append(scores["fn"])

    table = pd.DataFrame(results)
    table.to_csv(result_file, index=False)
    print(table)
52+
53+
54+
def main():
    """Command line entry point: run the IHC validation evaluation."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", default=ROOT)
    parser.add_argument("--folders", default=ANNOTATION_FOLDERS)
    parser.add_argument("--result_file", default="results.csv")
    parser.add_argument("--cache_folder")
    cli_args = parser.parse_args()

    run_evaluation(cli_args.input, cli_args.folders, cli_args.result_file, cli_args.cache_folder)


if __name__ == "__main__":
    main()
Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
import argparse
2+
import os
3+
from glob import glob
4+
5+
import napari
6+
import tifffile
7+
8+
from flamingo_tools.validation import (
9+
fetch_data_for_evaluation, compute_matches_for_annotated_slice, for_visualization, parse_annotation_path
10+
)
11+
12+
ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationIHCs"
13+
14+
15+
def _match_image_path(annotation_path):
    """Find the unique image tif in ROOT that matches this annotation file.

    The annotation file name is expected to end in three '_'-separated fields;
    everything before them is the image prefix.
    """
    all_files = glob(os.path.join(ROOT, "*.tif"))
    prefix = os.path.basename(annotation_path).split("_")[:-3]
    prefix = "_".join(prefix)
    matches = [path for path in all_files if os.path.basename(path).startswith(prefix)]
    # Exactly one image must match, otherwise the prefix convention is violated.
    assert len(matches) == 1, f"{prefix}: {len(matches)}"
    return matches[0]
24+
25+
26+
def visualize_anotation(annotation_path, cache_folder):
    """Show an annotated slice in napari: raw image, matched segmentation and points.

    NOTE(review): the function name keeps the original (misspelled) spelling
    for backwards compatibility with existing callers.

    Args:
        annotation_path: Path to the annotation CSV.
        cache_folder: Optional folder with cached segmentation slices.
    """
    print("Checking", annotation_path)
    cochlea, slice_id = parse_annotation_path(annotation_path)
    cache_path = None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")

    image_path = _match_image_path(annotation_path)

    # For the cochlea M_LR_000226_R the actual component is 2, not 1.
    component = 2 if "226_R" in cochlea else 1
    segmentation, annotations = fetch_data_for_evaluation(
        annotation_path, cache_path=cache_path, components_for_postprocessing=[component], seg_name="IHC_v2",
    )

    image = tifffile.memmap(image_path)
    # For a 2d segmentation slice, show the central z-slice of the image stack.
    if segmentation.ndim == 2:
        image = image[image.shape[0] // 2]
    assert image.shape == segmentation.shape, f"{image.shape}, {segmentation.shape}"

    matches = compute_matches_for_annotated_slice(segmentation, annotations, matching_tolerance=5)
    vis_segmentation, vis_points, seg_props, point_props = for_visualization(segmentation, annotations, matches)

    v = napari.Viewer()
    v.add_image(image)
    v.add_labels(vis_segmentation, **seg_props)
    v.add_points(vis_points, **point_props)
    # Also add the unprocessed layers (hidden) for manual comparison.
    v.add_labels(segmentation, visible=False)
    v.add_points(annotations, visible=False)
    v.title = os.path.relpath(annotation_path, ROOT)
    napari.run()
60+
61+
62+
def main():
    """Visualize one or more annotations; defaults to every CSV found under ROOT."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--annotations", nargs="+")
    parser.add_argument("--cache_folder")
    args = parser.parse_args()

    # nargs="+" yields None when the flag is absent, never an empty list.
    annotation_paths = args.annotations or sorted(glob(os.path.join(ROOT, "**", "*.csv"), recursive=True))

    for path in annotation_paths:
        visualize_anotation(path, args.cache_folder)


if __name__ == "__main__":
    main()

scripts/validation/SGNs/analyze.py

Lines changed: 35 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,43 @@
11
import argparse
22
import pandas as pd
33

4-
# TODO more logic to separate by annotator etc.
5-
# For now this is just a simple script for global eval
64

7-
parser = argparse.ArgumentParser()
8-
parser.add_argument("file")
9-
args = parser.parse_args()
5+
def compute_scores(table, annotator=None):
    """Compute precision, recall and F1-score from an evaluation result table.

    Args:
        table: DataFrame with columns 'annotator', 'tps', 'fps', 'fns'.
        annotator: If given, restrict the evaluation to rows of this annotator;
            otherwise aggregate over all rows (reported as 'all').

    Returns:
        One-row DataFrame with columns 'annotator', 'precision', 'recall', 'f1-score'.
    """
    if annotator is None:
        annotator = "all"
    else:
        table = table[table.annotator == annotator]

    tp = table.tps.sum()
    fp = table.fps.sum()
    fn = table.fns.sum()

    # Guard against zero denominators (empty selection or all-zero counts),
    # which previously produced NaN scores.
    precision = tp / (tp + fp) if (tp + fp) > 0 else 0.0
    recall = tp / (tp + fn) if (tp + fn) > 0 else 0.0
    f1_score = 2 * precision * recall / (precision + recall) if (precision + recall) > 0 else 0.0

    return pd.DataFrame({
        "annotator": [annotator], "precision": [precision], "recall": [recall], "f1-score": [f1_score]
    })
2222

23-
print("Evaluation:")
24-
print("Precision:", precision)
25-
print("Recall:", recall)
26-
print("F1-Score:", f1_score)
23+
24+
def main():
    """Read an evaluation result table and print per-annotator and overall scores."""
    parser = argparse.ArgumentParser()
    parser.add_argument("result_file")
    args = parser.parse_args()

    table = pd.read_csv(args.result_file)

    # One score row per annotator, plus the aggregate over all annotators.
    score_tables = [compute_scores(table, annotator) for annotator in pd.unique(table.annotator)]
    score_tables.append(compute_scores(table, annotator=None))

    print(pd.concat(score_tables))


if __name__ == "__main__":
    main()

scripts/validation/SGNs/compare_annotations.py

Lines changed: 41 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,58 @@
11
import os
2+
from glob import glob
23

3-
import imageio.v3 as imageio
44
import napari
55
import pandas as pd
6+
import tifffile
67

7-
ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1Validation"
8-
TEST_ANNOTATION = os.path.join(ROOT, "AnnotationsEK/MAMD58L_PV_z771_base_full_annotationsEK.csv")
8+
ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationSGNs"
9+
ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD", "AnnotationLR"]
10+
COLOR = ["green", "yellow", "orange"]
911

1012

11-
def check_annotation(image_path, annotation_path):
12-
annotations = pd.read_csv(annotation_path)[["axis-0", "axis-1", "axis-2"]].values
13+
def _match_annotations(image_path):
    """Collect the annotation point sets matching this image, keyed by annotation folder.

    Folders that do not contain exactly one matching CSV are skipped.
    """
    prefix = "_".join(os.path.basename(image_path).split("_")[:3])

    matched = {}
    for folder in ANNOTATION_FOLDERS:
        candidates = [
            path for path in glob(os.path.join(ROOT, folder, "*.csv"))
            if os.path.basename(path).startswith(prefix)
        ]
        if len(candidates) == 1:
            points = pd.read_csv(candidates[0])[["axis-0", "axis-1", "axis-2"]].values
            matched[folder] = points

    return matched
29+
30+
31+
def compare_annotations(image_path):
    """Open an image in napari together with all matching annotator point sets."""
    annotations = _match_annotations(image_path)

    image = tifffile.memmap(image_path)
    viewer = napari.Viewer()
    viewer.add_image(image)
    # One point layer per annotator, with a distinct color for each.
    for i, (name, annotation) in enumerate(annotations.items()):
        viewer.add_points(annotation, name=name, face_color=COLOR[i])
    viewer.title = os.path.basename(image_path)
    napari.run()
1941

2042

2143
def main():
    """Compare annotator point sets for the given images (default: all tifs in ROOT)."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--images", nargs="+")
    args = parser.parse_args()

    # nargs="+" yields None when the flag is absent, never an empty list.
    image_paths = args.images if args.images is not None else sorted(glob(os.path.join(ROOT, "*.tif")))

    for path in image_paths:
        compare_annotations(path)
2356

2457

2558
if __name__ == "__main__":

scripts/validation/SGNs/visualize_validation.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@ def visualize_anotation(annotation_path, cache_folder):
2929
cache_path = None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")
3030

3131
image_path = _match_image_path(annotation_path)
32-
return
3332

3433
segmentation, annotations = fetch_data_for_evaluation(
3534
annotation_path, cache_path=cache_path, components_for_postprocessing=[1],
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
import os
2+
3+
import imageio.v3 as imageio
4+
import numpy as np
5+
6+
ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/croppings/226R_SGN_crop"
7+
IMAGE_PATH = os.path.join(ROOT, "M_LR_000226_R_crop_0802-1067-0776_PV.tif")
8+
SEG_PATH = os.path.join(ROOT, "M_LR_000226_R_crop_0802-1067-0776_SGN_v2.tif")
9+
NUC_PATH = os.path.join(ROOT, "M_LR_000226_R_crop_0802-1067-0776_NUCLEI.tif")
10+
11+
12+
def segment_nuclei():
    """Run naive nucleus segmentation for the SGN crop and write the result to NUC_PATH."""
    from flamingo_tools.segmentation.nucleus_segmentation import _naive_nucleus_segmentation_impl

    image = imageio.imread(IMAGE_PATH)
    segmentation = imageio.imread(SEG_PATH)

    # The implementation writes in-place into a pre-allocated output volume.
    nuclei = np.zeros_like(segmentation, dtype=segmentation.dtype)
    _naive_nucleus_segmentation_impl(
        image, segmentation, table=None, output=nuclei, n_threads=8, resolution=0.38
    )

    imageio.imwrite(NUC_PATH, nuclei, compression="zlib")
22+
23+
24+
def check_segmentation():
    """Visually inspect the image with the SGN and nucleus segmentations in napari."""
    import napari

    image = imageio.imread(IMAGE_PATH)
    segmentation = imageio.imread(SEG_PATH)
    nuclei = imageio.imread(NUC_PATH)

    viewer = napari.Viewer()
    viewer.add_image(image)
    viewer.add_labels(segmentation)
    viewer.add_labels(nuclei)
    napari.run()
36+
37+
38+
def main():
    """Run the nucleus segmentation; switch to check_segmentation() for visual QC."""
    segment_nuclei()
    # check_segmentation()


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)