|
| 1 | +import multiprocessing as mp |
| 2 | +from concurrent import futures |
| 3 | +from typing import Optional |
| 4 | + |
| 5 | +import numpy as np |
| 6 | +import pandas as pd |
| 7 | +import trimesh |
| 8 | +from skimage.measure import marching_cubes |
| 9 | +from tqdm import tqdm |
| 10 | + |
| 11 | +from .file_utils import read_image_data |
| 12 | + |
| 13 | + |
def _measure_volume_and_surface(mask, resolution):
    """Measure volume and surface area of a 3D binary object mask.

    A triangle mesh of the object's surface is extracted with marching cubes
    (scaled to physical units via ``resolution``) and measured with trimesh.

    Args:
        mask: 3D binary array marking the object's voxels.
        resolution: Isotropic physical voxel size used as marching-cubes spacing.

    Returns:
        Tuple ``(volume, surface)``. ``volume`` is NaN when the extracted mesh
        is not watertight, since the enclosed volume is ill-defined then.
    """
    # Extract the surface mesh in physical coordinates.
    vertices, triangles, vertex_normals, _ = marching_cubes(
        mask, spacing=(resolution,) * 3
    )
    surface_mesh = trimesh.Trimesh(
        vertices=vertices, faces=triangles, vertex_normals=vertex_normals
    )

    # The enclosed volume is only meaningful for a closed (watertight) mesh.
    volume = np.abs(surface_mesh.volume) if surface_mesh.is_watertight else np.nan
    return volume, surface_mesh.area
| 26 | + |
| 27 | + |
# Could also support s3 directly?
def compute_object_measures(
    image_path: str,
    segmentation_path: str,
    segmentation_table_path: str,
    output_table_path: str,
    image_key: Optional[str] = None,
    segmentation_key: Optional[str] = None,
    n_threads: Optional[int] = None,
    resolution: float = 0.38,
):
    """Compute per-object intensity and morphology measurements.

    For every label id in the pre-computed MoBIE segmentation table, intensity
    statistics (mean, stdev, min, max, median, selected percentiles) of the
    image inside the object mask are measured, together with the object's
    volume and surface area from a marching-cubes mesh. The result is written
    to ``output_table_path`` as a tab-separated table.

    Args:
        image_path: Path to the intensity image volume.
        segmentation_path: Path to the segmentation volume.
        segmentation_table_path: Path to the MoBIE segmentation table (TSV)
            with ``label_id`` and bounding-box columns in physical units.
        output_table_path: Path for the output measurement table (TSV).
        image_key: Internal key / dataset name for the image volume.
        segmentation_key: Internal key / dataset name for the segmentation.
        n_threads: Number of threads for parallelization; defaults to the
            CPU count.
        resolution: Isotropic physical voxel size, used to convert the table's
            bounding boxes to voxel coordinates and to scale the mesh.
    """
    # First, we load the pre-computed segmentation table from MoBIE.
    table = pd.read_csv(segmentation_table_path, sep="\t")

    # Then, open the volumes.
    image = read_image_data(image_path, image_key)
    segmentation = read_image_data(segmentation_path, segmentation_key)

    # All measurement columns besides the label id, so empty objects can be
    # reported as NaN rows with a consistent schema.
    percentiles = (5, 10, 25, 75, 90, 95)
    measure_keys = (
        ["mean", "stdev", "min", "max", "median"]
        + [f"percentile-{percentile}" for percentile in percentiles]
        + ["volume", "surface"]
    )

    def intensity_measures(seg_id):
        # Get the bounding box of this object and convert it from physical
        # units to voxel coordinates.
        row = table[table.label_id == seg_id]

        bb_min = np.array([
            row.bb_min_z.item(), row.bb_min_y.item(), row.bb_min_x.item()
        ]) / resolution
        # NOTE: signed integers on purpose — with an unsigned dtype,
        # `bmin - 1` below would wrap around for objects touching the volume
        # border at coordinate 0 and produce a garbage slice.
        bb_min = np.round(bb_min, 0).astype("int64")

        bb_max = np.array([
            row.bb_max_z.item(), row.bb_max_y.item(), row.bb_max_x.item()
        ]) / resolution
        bb_max = np.round(bb_max, 0).astype("int64")

        # Grow the bounding box by one voxel, clipped to the volume bounds.
        bb = tuple(
            slice(max(int(bmin) - 1, 0), min(int(bmax) + 1, sh))
            for bmin, bmax, sh in zip(bb_min, bb_max, image.shape)
        )

        local_image = image[bb]
        mask = segmentation[bb] == seg_id
        masked_intensity = local_image[mask]

        # Guard against an object without any voxels inside its bounding box
        # (e.g. an inconsistent table): np.min / np.max would raise on the
        # empty array, so report NaN measurements instead.
        if masked_intensity.size == 0:
            measures = {"label_id": seg_id}
            measures.update({key: np.nan for key in measure_keys})
            return measures

        # Do the base intensity measurements.
        measures = {
            "label_id": seg_id,
            "mean": np.mean(masked_intensity),
            "stdev": np.std(masked_intensity),
            "min": np.min(masked_intensity),
            "max": np.max(masked_intensity),
            "median": np.median(masked_intensity),
        }
        for percentile in percentiles:
            measures[f"percentile-{percentile}"] = np.percentile(masked_intensity, percentile)

        # Do the volume and surface measurement.
        volume, surface = _measure_volume_and_surface(mask, resolution)
        measures["volume"] = volume
        measures["surface"] = surface
        return measures

    seg_ids = table.label_id.values
    n_threads = mp.cpu_count() if n_threads is None else n_threads
    with futures.ThreadPoolExecutor(n_threads) as pool:
        measures = list(tqdm(
            pool.map(intensity_measures, seg_ids),
            total=len(seg_ids), desc="Compute intensity measures"
        ))

    # Create the result table and save it.
    keys = measures[0].keys()
    measures = pd.DataFrame({k: [measure[k] for measure in measures] for k in keys})
    measures.to_csv(output_table_path, sep="\t", index=False)
0 commit comments