Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion flamingo_tools/data_conversion.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,11 @@
import numpy as np
import pybdv

from cluster_tools.utils.volume_utils import write_format_metadata
try:
from cluster_tools.utils.volume_utils import write_format_metadata
except ImportError:
write_format_metadata = None

from elf.io import open_file
from skimage.transform import rescale

Expand Down
18 changes: 18 additions & 0 deletions reproducibility/templates_processing/REAMDE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Segmentation and detection workflows

Implements workflows to segment SGNs or IHCs, and to detect ribbon synapses, on a Slurm cluster.

For SGN segmentation run:
- mean_std_SGN_template.sbatch
- apply_unet_SGN_template.sbatch
- segment_unet_SGN_template.sbatch

For IHC segmentation run:
- mean_std_IHC_template.sbatch
- apply_unet_IHC_template.sbatch
- segment_unet_IHC_template.sbatch

For ribbon synapse detection without an associated IHC segmentation run:
- detect_synapse_template.sbatch

For ribbon synapse detection with an associated IHC segmentation run:
- detect_synapse_marker_template.sbatch
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
#!/bin/bash
#SBATCH --job-name=apply-unet-IHC
#SBATCH -t 02:00:00 # for gerbil up to ~3 hours

#SBATCH -p grete:shared # the partition
#SBATCH -G A100:1 # For requesting 1 A100 GPU.
#SBATCH -c 1
#SBATCH --mem 24G
#SBATCH -a 0-9

# Apply the IHC distance U-Net to one cochlea volume. The slurm job array
# (-a 0-9) splits the prediction into PREDICTION_INSTANCES parallel tasks.
source ~/.bashrc
# micromamba activate micro-sam_gpu
micromamba activate sam

# Log some context for debugging.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Location of the flamingo-tools checkout to run from.
# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# Positional arguments:
COCHLEA=$1        # name of cochlea, as it appears in MoBIE and the NHR
DATA=$2           # data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
STAIN_CHANNEL=$3  # channel in n5 folder, e.g. 0, 1, or 2
SEG_NAME=$4       # segmentation name, as it appears in MoBIE, e.g. IHC_v4

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"

# export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v4_cochlea_distance_unet_IHC_supervised_2025-07-14
export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v5_cochlea_distance_unet_IHC_supervised_2025-08-20/
export PREDICTION_INSTANCES=10
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"
echo "Model: ${MODEL}"

# Build the inline python snippet by string concatenation; all parameters are
# handed over through the exported environment variables above.
py_cmd='import sys,os;'
py_cmd+='sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
py_cmd+='import unet_prediction;'
py_cmd+='unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
py_cmd+='output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
py_cmd+='input_key=os.environ["INPUT_KEY"],'
py_cmd+='prediction_instances=os.environ["PREDICTION_INSTANCES"])'
python -c "$py_cmd"

Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
#!/bin/bash
#SBATCH --job-name=apply-unet-SGN
#SBATCH -t 02:00:00 # for gerbil up to ~3 hours

#SBATCH -p grete:shared # the partition
#SBATCH -G A100:1 # For requesting 1 A100 GPU.
#SBATCH -c 1
#SBATCH --mem 24G
#SBATCH -a 0-9

# Apply the SGN distance U-Net to one cochlea volume. The slurm job array
# (-a 0-9) splits the prediction into PREDICTION_INSTANCES parallel tasks.
source ~/.bashrc
micromamba activate micro-sam_gpu

# Log some context for debugging.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Location of the flamingo-tools checkout to run from.
SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# Positional arguments:
COCHLEA=$1        # name of cochlea, as it appears in MoBIE and the NHR
DATA=$2           # data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
STAIN_CHANNEL=$3  # channel in n5 folder, e.g. 0, 1, or 2
SEG_NAME=$4       # segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/SGN/v2_cochlea_distance_unet_SGN_supervised_2025-05-27
export PREDICTION_INSTANCES=10
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"
echo "Model: ${MODEL}"

# Build the inline python snippet by string concatenation; all parameters are
# handed over through the exported environment variables above.
py_cmd='import sys,os;'
py_cmd+='sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
py_cmd+='import unet_prediction;'
py_cmd+='unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
py_cmd+='output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
py_cmd+='input_key=os.environ["INPUT_KEY"],'
py_cmd+='prediction_instances=os.environ["PREDICTION_INSTANCES"])'
python -c "$py_cmd"

Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
#!/bin/bash
#SBATCH --job-name=synapse-marker
#SBATCH -t 08:00:00 # estimated time, adapt to your needs
#SBATCH [email protected] # change this to your mailaddress
#SBATCH --mail-type=FAIL # send mail only if the job fails

#SBATCH -p grete:shared # the partition
#SBATCH -G A100:1 # For requesting 1 A100 GPU.
#SBATCH -A nim00007
#SBATCH -c 2
#SBATCH --mem 36G

# Detect ribbon synapses in an image channel, restricted to an associated
# IHC segmentation mask (both read from S3 via --s3).
source ~/.bashrc
micromamba activate micro-sam_gpu

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script
#python myprogram.py $SLURM_ARRAY_TASK_ID

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=$1
# image channel, e.g. CTBP2 or RibA
IMAGE_CHANNEL=$2
# IHC segmentation name, as it appears in MoBIE, e.g. IHC_v4
IHC_SEG=$3

# Relative S3 paths for the image channel and the IHC segmentation mask.
export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3_"$IHC_SEG"

# Ensure the output folder exists (mkdir -p is a no-op if it already does).
mkdir -p "$OUTPUT_FOLDER"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt
export INPUT_KEY="s0"
# Maximum distance (in micron) of a detection to the IHC mask.
export MAX_DISTANCE=8

echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
echo "MODEL $MODEL"

# All expansions are quoted so that paths containing spaces do not word-split.
python ~/flamingo-tools/scripts/synapse_marker_detection/marker_detection.py \
    --input "$INPUT_PATH" \
    --input_key "$INPUT_KEY" \
    --output_folder "$OUTPUT_FOLDER" \
    --mask "$MASK_PATH" \
    --model "$MODEL" \
    --max_distance "$MAX_DISTANCE" \
    --s3

Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
#!/bin/bash
#SBATCH --job-name=synapse-detect
#SBATCH -t 42:00:00 # estimated time, adapt to your needs
#SBATCH [email protected] # change this to your mailaddress
#SBATCH --mail-type=FAIL # send mail only if the job fails

#SBATCH -p grete:shared # the partition
#SBATCH -G A100:1 # For requesting 1 A100 GPU.
#SBATCH -A nim00007
#SBATCH -c 2
#SBATCH --mem 500G

# Detect ribbon synapses in an image channel without an associated IHC
# segmentation (the input is read from S3 via --s3).
source ~/.bashrc
micromamba activate micro-sam_gpu

# Print out some info.
echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
echo "Home directory: ${HOME}"
echo "Working directory: $PWD"
echo "Current node: ${SLURM_NODELIST}"

# Run the script
#python myprogram.py $SLURM_ARRAY_TASK_ID

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=$1
# image channel, e.g. CTBP2 or RibA
IMAGE_CHANNEL=$2
# IHC segmentation name, as it appears in MoBIE, e.g. IHC_v4
IHC_SEG=$3

# Relative S3 paths for the image channel and the IHC segmentation.
export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr
# FIX: INPUT_KEY was previously never set (only a commented-out n5 variant),
# so --input_key received an empty string. The ome.zarr inputs use scale "s0",
# matching the marker detection template.
export INPUT_KEY="s0"

# data on NHR
# export INPUT_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
# export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3

# Ensure the output folder exists. The previous guard used the regular-file
# test (-f), which is never true for a directory; mkdir -p is idempotent, so
# no check is needed at all.
mkdir -p "$OUTPUT_FOLDER"

export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt

echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
echo "MODEL $MODEL"

# All expansions are quoted so that paths containing spaces do not word-split.
python ~/flamingo-tools/scripts/synapse_marker_detection/run_prediction.py \
    --input "$INPUT_PATH" \
    --input_key "$INPUT_KEY" \
    --output_folder "$OUTPUT_FOLDER" \
    --model "$MODEL" \
    --s3

51 changes: 51 additions & 0 deletions reproducibility/templates_processing/mean_std_IHC_template.sbatch
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#!/bin/bash
#SBATCH --job-name=mean-std-IHC
#SBATCH -t 01:00:00 # usually takes ~20 min

#SBATCH -p standard96s:shared # the partition
#SBATCH -A nim00007
#SBATCH -c 3
#SBATCH --mem 128G

# Preprocessing step for IHC segmentation: compute the mean / standard
# deviation statistics used to normalize the volume before U-Net prediction.
source ~/.bashrc
# micromamba activate flamingo13
micromamba activate sam

# Run the script

# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=$1
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=$2
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=$3
# segmentation name, as it appears in MoBIE, e.g. IHC_v4
SEG_NAME=$4

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
export SEG_CLASS="ihc"
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

# Ensure the output folder exists. The previous guard used the regular-file
# test (-f), which is never true for a directory; mkdir -p is idempotent, so
# no check is needed at all.
mkdir -p "$OUTPUT_FOLDER"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"

cmd_array=( 'import sys,os;'
'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
'import unet_prediction;'
'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
'input_key=os.environ["INPUT_KEY"],'
'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
cmd="${cmd_array[*]}"
python -c "$cmd"

49 changes: 49 additions & 0 deletions reproducibility/templates_processing/mean_std_SGN_template.sbatch
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/bin/bash
#SBATCH --job-name=mean-std-SGN
#SBATCH -t 01:00:00 # usually takes ~20 min

#SBATCH -p standard96s:shared # the partition
#SBATCH -A nim00007
#SBATCH -c 3
#SBATCH --mem 128G

# Preprocessing step for SGN segmentation: compute the mean / standard
# deviation statistics used to normalize the volume before U-Net prediction.
source ~/.bashrc
micromamba activate flamingo13

# Run the script

SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit

export SCRIPT_DIR=$SCRIPT_REPO/scripts

# name of cochlea, as it appears in MoBIE and the NHR
COCHLEA=$1
# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
DATA=$2
# channel in n5 folder, e.g. 0, 1, or 2
STAIN_CHANNEL=$3
# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2
SEG_NAME=$4

export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
export SEG_CLASS="sgn"
export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"

# Ensure the output folder exists. The previous guard used the regular-file
# test (-f), which is never true for a directory; mkdir -p is idempotent, so
# no check is needed at all.
mkdir -p "$OUTPUT_FOLDER"

echo "Input directory: ${INPUT}"
echo "Output directory: ${OUTPUT_FOLDER}"

cmd_array=( 'import sys,os;'
'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
'import unet_prediction;'
'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
'input_key=os.environ["INPUT_KEY"],'
'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
cmd="${cmd_array[*]}"
python -c "$cmd"

Loading
Loading