
Commit 8ea5b66

more conservative SV filtering at z borders

1 parent fbe4a55

3 files changed, +29 -8 lines

run_sbatch_revision.sbatch

Lines changed: 7 additions & 6 deletions
@@ -1,15 +1,16 @@
 #! /bin/bash
 #SBATCH -c 4 #4 #8
-#SBATCH --mem 120G #120G #32G #64G #256G
+#SBATCH --mem 256G #120G #32G #64G #256G
 #SBATCH -p grete:shared #grete:shared #grete-h100:shared
-#SBATCH -t 4:00:00 #6:00:00 #48:00:00
+#SBATCH -t 3:00:00 #6:00:00 #48:00:00
 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1
 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out
-#SBATCH -A nim00007 #SBATCH --constraint 80gb
+#SBATCH -A nim00007
+#SBATCH --constraint 80gb

 source ~/.bashrc
 conda activate synapse-net
 python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \
-  -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \
-  -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering --store \
-  -s ./analysis_results/man_subset
+  -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25 \
+  -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering/weaker_SVfilter/man_subset --store \
+  -s ./analysis_results/weaker_SVfilter/man_subset
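Note on the account/constraint hunk: sbatch only recognizes `#SBATCH` directives at the start of a line and treats trailing `#...` text as a comment (this script already relies on that for its option history), so the `--constraint 80gb` appended after `-A nim00007` was presumably never applied. Splitting it onto its own directive line makes the constraint take effect.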

scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ def SV_pred(raw: np.ndarray, SV_model: str, output_path: str = None, store: bool

     if not use_existing_seg:
         #Excluding boundary SV, because they would also not be used in the manual annotation
-        seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, exclude_boundary=True, verbose=False, return_predictions=True)
+        seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, exclude_boundary_vesicles=True, verbose=False, return_predictions=True)

     if store and output_path:
         with h5py.File(output_path, "a") as f:

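Per the commit message, the new `exclude_boundary_vesicles` keyword routes post-processing through `filter_border_vesicles`, which filters more conservatively at the z borders than the existing `exclude_boundary` path (`filter_border_objects`). A minimal usage sketch of the updated call, assuming a tomogram stored under the key `raw` in an HDF5 file (the file path, dataset key, and model path below are hypothetical):

```python
import h5py
from synapse_net.inference.vesicles import segment_vesicles

# Hypothetical input file and dataset key.
with h5py.File("tomogram.h5", "r") as f:
    raw = f["raw"][:]

# exclude_boundary_vesicles=True enables the new, more conservative
# border filtering (filter_border_vesicles) added in this commit.
seg, pred = segment_vesicles(
    input_volume=raw,
    model_path="/path/to/SV_model",  # hypothetical model path
    exclude_boundary_vesicles=True,
    verbose=False,
    return_predictions=True,
)
```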
synapse_net/inference/vesicles.py

Lines changed: 21 additions & 1 deletion
@@ -7,7 +7,7 @@
 import torch

 from synapse_net.inference.util import apply_size_filter, get_prediction, _Scaler
-from synapse_net.inference.postprocessing.vesicles import filter_border_objects
+from synapse_net.inference.postprocessing.vesicles import filter_border_objects, filter_border_vesicles


 def distance_based_vesicle_segmentation(
@@ -132,6 +132,7 @@ def segment_vesicles(
     return_predictions: bool = False,
     scale: Optional[List[float]] = None,
     exclude_boundary: bool = False,
+    exclude_boundary_vesicles: bool = False,
     mask: Optional[np.ndarray] = None,
 ) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
     """Segment vesicles in an input volume or image.
@@ -182,6 +183,25 @@

     if exclude_boundary:
         seg = filter_border_objects(seg)
+    if exclude_boundary_vesicles:
+        seg_ids = filter_border_vesicles(seg)
+        # Step 1: Zero out everything not in seg_ids
+        seg[~np.isin(seg, seg_ids)] = 0
+
+        # Step 2: Relabel remaining IDs to be consecutive starting from 1
+        unique_ids = np.unique(seg)
+        unique_ids = unique_ids[unique_ids != 0]  # Exclude background (0)
+
+        label_map = {old_label: new_label for new_label, old_label in enumerate(unique_ids, start=1)}
+
+        # Apply relabeling using a temp array (to avoid large ints in-place)
+        new_seg = np.zeros_like(seg, dtype=np.int32)
+        for old_label, new_label in label_map.items():
+            new_seg[seg == old_label] = new_label
+
+        # Final step: replace original seg with relabelled and casted version
+        seg = new_seg
+
     seg = scaler.rescale_output(seg, is_segmentation=True)

     if return_predictions:
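The relabeling loop added here makes one full-volume pass per vesicle ID. For reference, a vectorized sketch with the same zero-out-and-relabel semantics, assuming scikit-image is available (`relabel_sequential` is my substitution, not something this commit uses):

```python
import numpy as np
from skimage.segmentation import relabel_sequential

def keep_and_relabel(seg: np.ndarray, seg_ids) -> np.ndarray:
    """Zero out labels not in seg_ids, then relabel the rest as 1..N."""
    # Single pass: keep only the vesicle IDs returned by filter_border_vesicles.
    seg = np.where(np.isin(seg, seg_ids), seg, 0)
    # relabel_sequential maps the surviving labels to consecutive integers
    # starting at 1 and keeps 0 as background.
    relabeled, _, _ = relabel_sequential(seg)
    return relabeled.astype(np.int32)
```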
