|
7 | 7 | import torch |
8 | 8 |
|
9 | 9 | from synapse_net.inference.util import apply_size_filter, get_prediction, _Scaler |
10 | | -from synapse_net.inference.postprocessing.vesicles import filter_border_objects |
| 10 | +from synapse_net.inference.postprocessing.vesicles import filter_border_objects, filter_border_vesicles |
11 | 11 |
|
12 | 12 |
|
13 | 13 | def distance_based_vesicle_segmentation( |
@@ -132,6 +132,7 @@ def segment_vesicles( |
132 | 132 | return_predictions: bool = False, |
133 | 133 | scale: Optional[List[float]] = None, |
134 | 134 | exclude_boundary: bool = False, |
| 135 | + exclude_boundary_vesicles: bool = False, |
135 | 136 | mask: Optional[np.ndarray] = None, |
136 | 137 | ) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]: |
137 | 138 | """Segment vesicles in an input volume or image. |
@@ -182,6 +183,25 @@ def segment_vesicles( |
182 | 183 |
|
183 | 184 | if exclude_boundary: |
184 | 185 | seg = filter_border_objects(seg) |
| 186 | + if exclude_boundary_vesicles: |
| 187 | + seg_ids = filter_border_vesicles(seg) |
| 188 | + # Step 1: Zero out everything not in seg_ids |
| 189 | + seg[~np.isin(seg, seg_ids)] = 0 |
| 190 | + |
| 191 | + # Step 2: Relabel remaining IDs to be consecutive starting from 1 |
| 192 | + unique_ids = np.unique(seg) |
| 193 | + unique_ids = unique_ids[unique_ids != 0] # Exclude background (0) |
| 194 | + |
| 195 | + label_map = {old_label: new_label for new_label, old_label in enumerate(unique_ids, start=1)} |
| 196 | + |
| 197 | +            # Apply relabeling using a temp array (relabeling in place could clobber old labels not yet remapped)
| 198 | + new_seg = np.zeros_like(seg, dtype=np.int32) |
| 199 | + for old_label, new_label in label_map.items(): |
| 200 | + new_seg[seg == old_label] = new_label |
| 201 | + |
| 202 | +            # Final step: replace original seg with the relabeled and cast version
| 203 | + seg = new_seg |
| 204 | + |
185 | 205 | seg = scaler.rescale_output(seg, is_segmentation=True) |
186 | 206 |
|
187 | 207 | if return_predictions: |
|
0 commit comments