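"""Visually check IHC validation annotations against the IHC_v2 segmentation in napari.

For each annotation CSV the matching image crop under ROOT is located, the segmentation and
annotations are fetched, segmented objects are matched to the annotated points, and the result
is displayed in a napari viewer. Run with --annotations to check specific CSV files, or without
arguments to iterate over all annotation files found under ROOT.
"""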
import argparse
import os
from glob import glob

import napari
import tifffile

from flamingo_tools.validation import (
    fetch_data_for_evaluation, compute_matches_for_annotated_slice, for_visualization, parse_annotation_path
)

ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationIHCs"


def _match_image_path(annotation_path):
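    """Find the image crop in ROOT that corresponds to the given annotation file.

    The match is made on the filename prefix (everything before the last three
    underscore-separated parts of the annotation filename); exactly one tif file must match.
    """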
    all_files = glob(os.path.join(ROOT, "*.tif"))
    prefix = os.path.basename(annotation_path).split("_")[:-3]
    prefix = "_".join(prefix)
    matches = [path for path in all_files if os.path.basename(path).startswith(prefix)]
    assert len(matches) == 1, f"{prefix}: {len(matches)}"
    return matches[0]


def visualize_annotation(annotation_path, cache_folder):
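    """Open a napari viewer showing the image, the matched segmentation, and the annotations.

    Segmentation and annotations are fetched via fetch_data_for_evaluation (optionally cached in
    cache_folder) and matched with compute_matches_for_annotated_slice before visualization.
    """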
    print("Checking", annotation_path)
    cochlea, slice_id = parse_annotation_path(annotation_path)
    cache_path = None if cache_folder is None else os.path.join(cache_folder, f"{cochlea}_{slice_id}.tif")

    image_path = _match_image_path(annotation_path)

    # The 226_R cochlea uses a different component id for postprocessing the segmentation.
    component = 2 if "226_R" in cochlea else 1
    segmentation, annotations = fetch_data_for_evaluation(
        annotation_path, cache_path=cache_path, components_for_postprocessing=[component], seg_name="IHC_v2",
    )

    image = tifffile.memmap(image_path)
    # For a 2D segmentation only the central z-slice of the image stack is used.
    if segmentation.ndim == 2:
        image = image[image.shape[0] // 2]
    assert image.shape == segmentation.shape, f"{image.shape}, {segmentation.shape}"

    # Match segmented objects to the point annotations and derive the napari layer properties.
    matches = compute_matches_for_annotated_slice(segmentation, annotations, matching_tolerance=5)
    vis_segmentation, vis_points, seg_props, point_props = for_visualization(segmentation, annotations, matches)

    # Uncomment to print the true positive and false negative annotations.
    # tps, fns = matches["tp_annotations"], matches["fn"]
    # print("True positive annotations:")
    # print(tps)
    # print("False negative annotations:")
    # print(fns)

    v = napari.Viewer()
    v.add_image(image)
    v.add_labels(vis_segmentation, **seg_props)
    v.add_points(vis_points, **point_props)
    v.add_labels(segmentation, visible=False)
    v.add_points(annotations, visible=False)
    v.title = os.path.relpath(annotation_path, ROOT)
    napari.run()


def main():
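    """Parse the command line arguments and visualize each selected annotation."""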
    parser = argparse.ArgumentParser()
    parser.add_argument("--annotations", nargs="+")
    parser.add_argument("--cache_folder")
    args = parser.parse_args()
    cache_folder = args.cache_folder

    if args.annotations is None:
        annotation_paths = sorted(glob(os.path.join(ROOT, "**", "*.csv"), recursive=True))
    else:
        annotation_paths = args.annotations

    for annotation_path in annotation_paths:
        visualize_annotation(annotation_path, cache_folder)


if __name__ == "__main__":
    main()