Skip to content

Commit 25f7d73

Browse files
Merge pull request #66 from computational-cell-analytics/templates_for_segmentation
Templates for SGN segmentation
2 parents ff84404 + 0608f8d commit 25f7d73

19 files changed

+776
-1
lines changed

flamingo_tools/data_conversion.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,11 @@
1010
import numpy as np
1111
import pybdv
1212

13-
from cluster_tools.utils.volume_utils import write_format_metadata
13+
try:
14+
from cluster_tools.utils.volume_utils import write_format_metadata
15+
except ImportError:
16+
write_format_metadata = None
17+
1418
from elf.io import open_file
1519
from skimage.transform import rescale
1620

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
# Segmentation and detection workflows

Implements workflows to segment SGNs or IHCs, and to detect ribbon synapses, on a slurm cluster.

For SGN segmentation run:
- mean_std_SGN_template.sbatch
- apply_unet_SGN_template.sbatch
- segment_unet_SGN_template.sbatch

For IHC segmentation run:
- mean_std_IHC_template.sbatch
- apply_unet_IHC_template.sbatch
- segment_unet_IHC_template.sbatch

For ribbon synapse detection without an associated IHC segmentation run:
- detect_synapse_template.sbatch

For ribbon synapse detection with an associated IHC segmentation run:
- detect_synapse_marker_template.sbatch
Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
#!/bin/bash
#SBATCH --job-name=apply-unet-IHC
#SBATCH -t 02:00:00            # for gerbil up to ~3 hours

#SBATCH -p grete:shared        # the partition
#SBATCH -G A100:1              # For requesting 1 A100 GPU.
#SBATCH -c 1
#SBATCH --mem 24G
#SBATCH -a 0-9                 # array job: one task per prediction instance

# Template for applying the IHC distance U-Net to a fused cochlea volume (n5).
# Usage: sbatch apply_unet_IHC_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME

source ~/.bashrc
# micromamba activate micro-sam_gpu
micromamba activate sam

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script

# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing, instead
# of silently building wrong cluster paths.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch apply_unet_IHC_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME}
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=${2:?missing DATA argument (n5 file name)}
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=${3:?missing STAIN_CHANNEL argument (e.g. 0, 1, or 2)}
# segmentation name, as it appears in MoBIE, e.g. IHC_v4
SEG_NAME=${4:?missing SEG_NAME argument (e.g. IHC_v4)}

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"

# export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v4_cochlea_distance_unet_IHC_supervised_2025-07-14
export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v5_cochlea_distance_unet_IHC_supervised_2025-08-20/
export PREDICTION_INSTANCES=10   # must match the slurm array size (-a 0-9)
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"
echo "Model: ${MODEL}"

# Build the inline python command; all parameters are passed via environment
# variables so nothing needs shell-quoting inside the python snippet.
cmd_array=( 'import sys,os;'
    'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
    'import unet_prediction;'
    'unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
    'output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
    'input_key=os.environ["INPUT_KEY"],'
    'prediction_instances=os.environ["PREDICTION_INSTANCES"])')
cmd="${cmd_array[*]}"
python -c "$cmd"
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
#!/bin/bash
#SBATCH --job-name=apply-unet-SGN
#SBATCH -t 02:00:00            # for gerbil up to ~3 hours

#SBATCH -p grete:shared        # the partition
#SBATCH -G A100:1              # For requesting 1 A100 GPU.
#SBATCH -c 1
#SBATCH --mem 24G
#SBATCH -a 0-9                 # array job: one task per prediction instance

# Template for applying the SGN distance U-Net to a fused cochlea volume (n5).
# Usage: sbatch apply_unet_SGN_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME

source ~/.bashrc
micromamba activate micro-sam_gpu

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing, instead
# of silently building wrong cluster paths.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch apply_unet_SGN_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME}
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=${2:?missing DATA argument (n5 file name)}
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=${3:?missing STAIN_CHANNEL argument (e.g. 0, 1, or 2)}
# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2
SEG_NAME=${4:?missing SEG_NAME argument (e.g. SGN_v2)}

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/SGN/v2_cochlea_distance_unet_SGN_supervised_2025-05-27
export PREDICTION_INSTANCES=10   # must match the slurm array size (-a 0-9)
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"
echo "Model: ${MODEL}"

# Build the inline python command; all parameters are passed via environment
# variables so nothing needs shell-quoting inside the python snippet.
cmd_array=( 'import sys,os;'
    'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
    'import unet_prediction;'
    'unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
    'output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
    'input_key=os.environ["INPUT_KEY"],'
    'prediction_instances=os.environ["PREDICTION_INSTANCES"])')
cmd="${cmd_array[*]}"
python -c "$cmd"
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
#!/bin/bash
#SBATCH --job-name=synapse-marker
#SBATCH -t 08:00:00            # estimated time, adapt to your needs
#SBATCH [email protected]  # change this to your mailaddress
#SBATCH --mail-type=FAIL       # send mail only if the job fails

#SBATCH -p grete:shared        # the partition
#SBATCH -G A100:1              # For requesting 1 A100 GPU.
#SBATCH -A nim00007
#SBATCH -c 2
#SBATCH --mem 36G

# Template for detecting ribbon synapses restricted to an IHC segmentation mask.
# Usage: sbatch detect_synapse_marker_template.sbatch COCHLEA IMAGE_CHANNEL IHC_SEG

source ~/.bashrc
micromamba activate micro-sam_gpu

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script
#python myprogram.py $SLURM_ARRAY_TASK_ID

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch detect_synapse_marker_template.sbatch COCHLEA IMAGE_CHANNEL IHC_SEG}
# image channel, e.g. CTBP2 or RibA
IMAGE_CHANNEL=${2:?missing IMAGE_CHANNEL argument (e.g. CTBP2 or RibA)}
# name of the IHC segmentation used as mask, as it appears in MoBIE
IHC_SEG=${3:?missing IHC_SEG argument}

# Relative paths within the MoBIE project; resolved remotely via --s3 below.
export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3_"$IHC_SEG"

# Create the output folder up front (no-op if it already exists), consistent
# with detect_synapse_template.sbatch.
mkdir -p "$OUTPUT_FOLDER"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt
export INPUT_KEY="s0"
export MAX_DISTANCE=8   # maximal distance of a detection to the IHC mask -- presumably in pixels/micrometer, confirm in marker_detection.py

echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
echo "MODEL $MODEL"

# Use the checked-out repo ($SCRIPT_REPO) instead of a hard-coded home path,
# and quote every expansion (paths may contain special characters).
python "$SCRIPT_REPO"/scripts/synapse_marker_detection/marker_detection.py \
    --input "$INPUT_PATH" \
    --input_key "$INPUT_KEY" \
    --output_folder "$OUTPUT_FOLDER" \
    --mask "$MASK_PATH" \
    --model "$MODEL" \
    --max_distance "$MAX_DISTANCE" \
    --s3
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
#!/bin/bash
#SBATCH --job-name=synapse-detect
#SBATCH -t 42:00:00            # estimated time, adapt to your needs
#SBATCH [email protected]  # change this to your mailaddress
#SBATCH --mail-type=FAIL       # send mail only if the job fails

#SBATCH -p grete:shared        # the partition
#SBATCH -G A100:1              # For requesting 1 A100 GPU.
#SBATCH -A nim00007
#SBATCH -c 2
#SBATCH --mem 500G

# Template for detecting ribbon synapses without an associated IHC segmentation.
# Usage: sbatch detect_synapse_template.sbatch COCHLEA IMAGE_CHANNEL IHC_SEG

source ~/.bashrc
micromamba activate micro-sam_gpu

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script
#python myprogram.py $SLURM_ARRAY_TASK_ID

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch detect_synapse_template.sbatch COCHLEA IMAGE_CHANNEL IHC_SEG}
# image channel, e.g. CTBP2 or RibA
IMAGE_CHANNEL=${2:?missing IMAGE_CHANNEL argument (e.g. CTBP2 or RibA)}
# name of the IHC segmentation, as it appears in MoBIE
IHC_SEG=${3:?missing IHC_SEG argument}

# Relative paths within the MoBIE project; resolved remotely via --s3 below.
export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr

# data on NHR
# export INPUT_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
# export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

# Scale level inside the ome.zarr. NOTE(review): this was previously never set
# (only the commented-out NHR variant above assigned it), so an empty string
# was passed to --input_key; "s0" matches the s3/ome-zarr layout used by the
# marker-detection template.
export INPUT_KEY="s0"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3

# mkdir -p is idempotent; the previous [[ -f ... ]] check tested for a regular
# file and therefore never matched an existing directory.
mkdir -p "$OUTPUT_FOLDER"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt

echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
echo "MODEL $MODEL"

# Use the checked-out repo ($SCRIPT_REPO) instead of a hard-coded home path,
# and quote every expansion.
python "$SCRIPT_REPO"/scripts/synapse_marker_detection/run_prediction.py \
    --input "$INPUT_PATH" \
    --input_key "$INPUT_KEY" \
    --output_folder "$OUTPUT_FOLDER" \
    --model "$MODEL" \
    --s3
Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
#!/bin/bash
#SBATCH --job-name=mean-std-IHC
#SBATCH -t 01:00:00            # usually takes ~20 min

#SBATCH -p standard96s:shared  # the partition
#SBATCH -A nim00007
#SBATCH -c 3
#SBATCH --mem 128G

# Template for computing mean/std statistics as preprocessing for IHC U-Net
# prediction.
# Usage: sbatch mean_std_IHC_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME

source ~/.bashrc
# micromamba activate flamingo13
micromamba activate sam

# Run the script

# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch mean_std_IHC_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME}
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=${2:?missing DATA argument (n5 file name)}
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=${3:?missing STAIN_CHANNEL argument (e.g. 0, 1, or 2)}
# segmentation name, as it appears in MoBIE, e.g. IHC_v4
SEG_NAME=${4:?missing SEG_NAME argument (e.g. IHC_v4)}

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
export SEG_CLASS="ihc"
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

# mkdir -p is idempotent; the previous [[ -f ... ]] check tested for a regular
# file and therefore never matched an existing directory.
mkdir -p "$OUTPUT_FOLDER"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"

# Build the inline python command; all parameters are passed via environment
# variables.
cmd_array=( 'import sys,os;'
    'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
    'import unet_prediction;'
    'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
    'input_key=os.environ["INPUT_KEY"],'
    'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
cmd="${cmd_array[*]}"
python -c "$cmd"
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
#!/bin/bash
#SBATCH --job-name=mean-std-SGN
#SBATCH -t 01:00:00            # usually takes ~20 min

#SBATCH -p standard96s:shared  # the partition
#SBATCH -A nim00007
#SBATCH -c 3
#SBATCH --mem 128G

# Template for computing mean/std statistics as preprocessing for SGN U-Net
# prediction.
# Usage: sbatch mean_std_SGN_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME

source ~/.bashrc
micromamba activate flamingo13

# Run the script

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || { echo "cannot cd into $SCRIPT_REPO/flamingo_tools/segmentation" >&2; exit 1; }

export SCRIPT_DIR="$SCRIPT_REPO"/scripts

# Fail fast with a clear message if a required argument is missing.
# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=${1:?usage: sbatch mean_std_SGN_template.sbatch COCHLEA DATA STAIN_CHANNEL SEG_NAME}
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=${2:?missing DATA argument (n5 file name)}
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=${3:?missing STAIN_CHANNEL argument (e.g. 0, 1, or 2)}
# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2
SEG_NAME=${4:?missing SEG_NAME argument (e.g. SGN_v2)}

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
export SEG_CLASS="sgn"
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

# mkdir -p is idempotent; the previous [[ -f ... ]] check tested for a regular
# file and therefore never matched an existing directory.
mkdir -p "$OUTPUT_FOLDER"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"

# Build the inline python command; all parameters are passed via environment
# variables.
cmd_array=( 'import sys,os;'
    'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
    'import unet_prediction;'
    'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
    'input_key=os.environ["INPUT_KEY"],'
    'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
cmd="${cmd_array[*]}"
python -c "$cmd"

0 commit comments

Comments
 (0)