"""Compare point annotations from different annotators for the F1 validation IHC image crops in napari."""

import os
from glob import glob

import napari
import pandas as pd
import tifffile

ROOT = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/AnnotatedImageCrops/F1ValidationIHCs"
# ANNOTATION_FOLDERS = ["AnnotationsEK", "AnnotationsAMD", "AnnotationsLR"]
ANNOTATION_FOLDERS = ["Annotations_AMD", "Annotations_LR"]
COLOR = ["green", "yellow", "orange"]


def _match_annotations(image_path):
    """Find the annotation CSV that belongs to this image in each annotation folder."""
    # The first three underscore-separated parts of the image name identify the crop.
    prefix = os.path.basename(image_path).split("_")[:3]
    prefix = "_".join(prefix)

    annotations = {}
    for annotation_folder in ANNOTATION_FOLDERS:
        all_annotations = glob(os.path.join(ROOT, annotation_folder, "*.csv"))
        matches = [ann for ann in all_annotations if os.path.basename(ann).startswith(prefix)]
        if len(matches) == 0:
            continue
        assert len(matches) == 1, f"Expected exactly one annotation for {prefix} in {annotation_folder}."
        annotation_path = matches[0]

        # The CSVs are exported napari point layers; keep only the coordinate columns.
        annotation = pd.read_csv(annotation_path)[["axis-0", "axis-1", "axis-2"]].values
        annotations[annotation_folder] = annotation

    return annotations


def compare_annotations(image_path):
    """Show the image in napari together with the point annotations of each annotator."""
    annotations = _match_annotations(image_path)

    image = tifffile.memmap(image_path)
    v = napari.Viewer()
    v.add_image(image)
    for i, (name, annotation) in enumerate(annotations.items()):
        v.add_points(annotation, name=name, face_color=COLOR[i])
    v.title = os.path.basename(image_path)
    napari.run()


def visualize(image_paths):
    for image_path in image_paths:
        compare_annotations(image_path)


def check_annotations(image_paths):
    """Print a table that lists which annotation folders contain annotations for each image."""
    annotation_status = {"file": []}
    annotation_status.update({ann: [] for ann in ANNOTATION_FOLDERS})
    for image_path in image_paths:
        annotations = _match_annotations(image_path)
        annotation_status["file"].append(os.path.basename(image_path))
        for ann in ANNOTATION_FOLDERS:
            annotation_status[ann].append("Yes" if ann in annotations else "No")
    annotation_status = pd.DataFrame(annotation_status)
    print(annotation_status)


def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--images", nargs="+", help="Image paths to process; defaults to all tifs in ROOT.")
    parser.add_argument("--check", action="store_true", help="Print which annotations exist per image instead of opening napari.")
    args = parser.parse_args()

    if args.images is None:
        image_paths = sorted(glob(os.path.join(ROOT, "*.tif")))
    else:
        image_paths = args.images

    if args.check:
        check_annotations(image_paths)
    else:
        visualize(image_paths)


if __name__ == "__main__":
    main()
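Usage sketch, assuming the script is saved as compare_annotations.py (the filename is not part of this commit) and run on a machine where ROOT is accessible:

    # Print a table showing which annotation folders contain a matching CSV for each image.
    python compare_annotations.py --check

    # Open specific crops in napari, overlaying the matching point annotations from each annotator.
    python compare_annotations.py --images crop1.tif crop2.tif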