diff --git a/flamingo_tools/data_conversion.py b/flamingo_tools/data_conversion.py
index 9bc4723..d31dcca 100644
--- a/flamingo_tools/data_conversion.py
+++ b/flamingo_tools/data_conversion.py
@@ -10,7 +10,11 @@
 import numpy as np
 import pybdv
 
-from cluster_tools.utils.volume_utils import write_format_metadata
+try:
+    from cluster_tools.utils.volume_utils import write_format_metadata
+except ImportError:
+    write_format_metadata = None
+
 from elf.io import open_file
 from skimage.transform import rescale
 
diff --git a/reproducibility/templates_processing/REAMDE.md b/reproducibility/templates_processing/REAMDE.md
new file mode 100644
index 0000000..2de26aa
--- /dev/null
+++ b/reproducibility/templates_processing/REAMDE.md
@@ -0,0 +1,18 @@
+# Segmentation and detection workflows
+
+Implements workflows to segment SGNs or IHCs, and to detect ribbon synapses, on Slurm.
+
+For SGN segmentation run:
+- mean_std_SGN_template.sbatch
+- apply_unet_SGN_template.sbatch
+- segment_unet_SGN_template.sbatch
+
+For IHC segmentation run:
+- mean_std_IHC_template.sbatch
+- apply_unet_IHC_template.sbatch
+- segment_unet_IHC_template.sbatch
+
+For ribbon synapse detection without an associated IHC segmentation run:
+- detect_synapse_template.sbatch
+For ribbon synapse detection with an associated IHC segmentation run:
+- detect_synapse_marker_template.sbatch
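
For reference, a plain SGN run submits the three templates in order. The sketch below uses hypothetical argument values (cochlea name, fused n5 file, channel index, segmentation name) that mirror the examples in the template comments; a dependency-chained variant is sketched after the mean_std_IHC template further down.

```bash
# Hypothetical submission sequence for SGN segmentation; wait for each
# step to finish before submitting the next one.
sbatch mean_std_SGN_template.sbatch M_LR_000226_L GEK11L_PV_GFP_01_fused.n5 0 SGN_v2
sbatch apply_unet_SGN_template.sbatch M_LR_000226_L GEK11L_PV_GFP_01_fused.n5 0 SGN_v2
sbatch segment_unet_SGN_template.sbatch M_LR_000226_L SGN_v2
```
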
diff --git a/reproducibility/templates_processing/apply_unet_IHC_template.sbatch b/reproducibility/templates_processing/apply_unet_IHC_template.sbatch
new file mode 100644
index 0000000..0d581ff
--- /dev/null
+++ b/reproducibility/templates_processing/apply_unet_IHC_template.sbatch
@@ -0,0 +1,60 @@
+#!/bin/bash
+#SBATCH --job-name=apply-unet-IHC
+#SBATCH -t 02:00:00  # for gerbil up to ~3 hours
+
+#SBATCH -p grete:shared  # the partition
+#SBATCH -G A100:1  # For requesting 1 A100 GPU.
+#SBATCH -c 1
+#SBATCH --mem 24G
+#SBATCH -a 0-9  # 10 array tasks, matching PREDICTION_INSTANCES below
+
+source ~/.bashrc
+# micromamba activate micro-sam_gpu
+micromamba activate sam
+
+# Print out some info.
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
+echo "Home directory: ${HOME}"
+echo "Working directory: $PWD"
+echo "Current node: ${SLURM_NODELIST}"
+
+# Run the script
+
+# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
+DATA=$2
+# channel in the n5 folder, e.g. 0, 1, or 2
+STAIN_CHANNEL=$3
+# segmentation name, as it appears in MoBIE, e.g. IHC_v4
+SEG_NAME=$4
+
+export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
+
+# export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v4_cochlea_distance_unet_IHC_supervised_2025-07-14
+export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/IHC/v5_cochlea_distance_unet_IHC_supervised_2025-08-20/
+export PREDICTION_INSTANCES=10
+export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"
+
+echo "Input directory: ${INPUT}"
+echo "Output directory: ${OUTPUT_FOLDER}"
+echo "Model: ${MODEL}"
+
+cmd_array=( 'import sys,os;'
+            'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
+            'import unet_prediction;'
+            'unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
+            'output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
+            'input_key=os.environ["INPUT_KEY"],'
+            'prediction_instances=os.environ["PREDICTION_INSTANCES"])')
+cmd="${cmd_array[*]}"
+python -c "$cmd"
+
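
The cmd_array construction above (and in the SGN variant below) only concatenates the quoted fragments into a single `python -c` one-liner; reformatted for readability, it runs the equivalent of:

```bash
# Equivalent of the cmd_array/python -c call, reformatted for readability;
# all values are taken from the environment variables exported above.
python -c '
import sys, os
sys.path.insert(0, os.environ["SCRIPT_DIR"])
import unet_prediction
unet_prediction.run_unet_prediction_slurm(
    input_path=os.environ["INPUT"],
    output_folder=os.environ["OUTPUT_FOLDER"],
    model_path=os.environ["MODEL"],
    input_key=os.environ["INPUT_KEY"],
    prediction_instances=os.environ["PREDICTION_INSTANCES"],
)'
```

Note that os.environ values are strings, so prediction_instances arrives as the string "10"; run_unet_prediction_slurm presumably handles the conversion internally.
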
diff --git a/reproducibility/templates_processing/apply_unet_SGN_template.sbatch b/reproducibility/templates_processing/apply_unet_SGN_template.sbatch
new file mode 100644
index 0000000..fb3d1a9
--- /dev/null
+++ b/reproducibility/templates_processing/apply_unet_SGN_template.sbatch
@@ -0,0 +1,57 @@
+#!/bin/bash
+#SBATCH --job-name=apply-unet-SGN
+#SBATCH -t 02:00:00  # for gerbil up to ~3 hours
+
+#SBATCH -p grete:shared  # the partition
+#SBATCH -G A100:1  # For requesting 1 A100 GPU.
+#SBATCH -c 1
+#SBATCH --mem 24G
+#SBATCH -a 0-9  # 10 array tasks, matching PREDICTION_INSTANCES below
+
+source ~/.bashrc
+micromamba activate micro-sam_gpu
+
+# Print out some info.
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
+echo "Home directory: ${HOME}"
+echo "Working directory: $PWD"
+echo "Current node: ${SLURM_NODELIST}"
+
+# Run the script
+
+SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
+DATA=$2
+# channel in the n5 folder, e.g. 0, 1, or 2
+STAIN_CHANNEL=$3
+# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2
+SEG_NAME=$4
+
+export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
+
+export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/SGN/v2_cochlea_distance_unet_SGN_supervised_2025-05-27
+export PREDICTION_INSTANCES=10
+export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"
+
+echo "Input directory: ${INPUT}"
+echo "Output directory: ${OUTPUT_FOLDER}"
+echo "Model: ${MODEL}"
+
+cmd_array=( 'import sys,os;'
+            'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
+            'import unet_prediction;'
+            'unet_prediction.run_unet_prediction_slurm(input_path=os.environ["INPUT"],'
+            'output_folder=os.environ["OUTPUT_FOLDER"],model_path=os.environ["MODEL"],'
+            'input_key=os.environ["INPUT_KEY"],'
+            'prediction_instances=os.environ["PREDICTION_INSTANCES"])')
+cmd="${cmd_array[*]}"
+python -c "$cmd"
+
diff --git a/reproducibility/templates_processing/detect_synapse_marker_template.sbatch b/reproducibility/templates_processing/detect_synapse_marker_template.sbatch
new file mode 100644
index 0000000..eb97e09
--- /dev/null
+++ b/reproducibility/templates_processing/detect_synapse_marker_template.sbatch
@@ -0,0 +1,57 @@
+#!/bin/bash
+#SBATCH --job-name=synapse-marker
+#SBATCH -t 08:00:00  # estimated time, adapt to your needs
+#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de  # change this to your mail address
+#SBATCH --mail-type=FAIL  # send mail on job failure
+
+#SBATCH -p grete:shared  # the partition
+#SBATCH -G A100:1  # For requesting 1 A100 GPU.
+#SBATCH -A nim00007
+#SBATCH -c 2
+#SBATCH --mem 36G
+
+source ~/.bashrc
+micromamba activate micro-sam_gpu
+
+# Print out some info.
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
+echo "Home directory: ${HOME}"
+echo "Working directory: $PWD"
+echo "Current node: ${SLURM_NODELIST}"
+
+# Run the script
+#python myprogram.py $SLURM_ARRAY_TASK_ID
+
+SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# image channel, e.g. CTBP2 or RibA
+IMAGE_CHANNEL=$2
+# name of the associated IHC segmentation, as it appears in MoBIE, e.g. IHC_v4
+IHC_SEG=$3
+
+export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
+export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr
+
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3_"$IHC_SEG"
+
+export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt
+export INPUT_KEY="s0"
+export MAX_DISTANCE=8
+
+echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
+echo "MODEL $MODEL"
+
+python "$SCRIPT_REPO"/scripts/synapse_marker_detection/marker_detection.py \
+    --input "$INPUT_PATH" \
+    --input_key "$INPUT_KEY" \
+    --output_folder "$OUTPUT_FOLDER" \
+    --mask "$MASK_PATH" \
+    --model "$MODEL" \
+    --max_distance "$MAX_DISTANCE" \
+    --s3
+
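
A hypothetical invocation, assuming the CTBP2 channel and the IHC_v4 segmentation already exist in the MoBIE project; MAX_DISTANCE is the maximal distance of a detection to the IHC mask, presumably in micrometer:

```bash
# Detect CTBP2-marked ribbon synapses close to the IHC_v4 segmentation;
# results go to predictions/<cochlea>/synapses_v3_IHC_v4.
sbatch detect_synapse_marker_template.sbatch M_LR_000226_L CTBP2 IHC_v4
```
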
diff --git a/reproducibility/templates_processing/detect_synapse_template.sbatch b/reproducibility/templates_processing/detect_synapse_template.sbatch
new file mode 100644
index 0000000..4cc292b
--- /dev/null
+++ b/reproducibility/templates_processing/detect_synapse_template.sbatch
@@ -0,0 +1,61 @@
+#!/bin/bash
+#SBATCH --job-name=synapse-detect
+#SBATCH -t 42:00:00  # estimated time, adapt to your needs
+#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de  # change this to your mail address
+#SBATCH --mail-type=FAIL  # send mail on job failure
+
+#SBATCH -p grete:shared  # the partition
+#SBATCH -G A100:1  # For requesting 1 A100 GPU.
+#SBATCH -A nim00007
+#SBATCH -c 2
+#SBATCH --mem 500G
+
+source ~/.bashrc
+micromamba activate micro-sam_gpu
+
+# Print out some info.
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
+echo "Home directory: ${HOME}"
+echo "Working directory: $PWD"
+echo "Current node: ${SLURM_NODELIST}"
+
+# Run the script
+#python myprogram.py $SLURM_ARRAY_TASK_ID
+
+SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# image channel, e.g. CTBP2 or RibA
+IMAGE_CHANNEL=$2
+# name of the IHC segmentation, as it appears in MoBIE, e.g. IHC_v4
+IHC_SEG=$3
+
+export INPUT_PATH="$COCHLEA"/images/ome-zarr/"$IMAGE_CHANNEL".ome.zarr
+export MASK_PATH="$COCHLEA"/images/ome-zarr/"$IHC_SEG".ome.zarr
+
+# data on NHR
+# export INPUT_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+# export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"
+
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/synapses_v3
+
+if ! [[ -d $OUTPUT_FOLDER ]] ; then
+    mkdir -p "$OUTPUT_FOLDER"
+fi
+
+export MODEL=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/trained_models/Synapses/synapse_detection_model_v3.pt
+
+echo "OUTPUT_FOLDER $OUTPUT_FOLDER"
+echo "MODEL $MODEL"
+
+python "$SCRIPT_REPO"/scripts/synapse_marker_detection/run_prediction.py \
+    --input "$INPUT_PATH" \
+    --input_key "s0" \
+    --output_folder "$OUTPUT_FOLDER" \
+    --model "$MODEL" \
+    --s3
+
diff --git a/reproducibility/templates_processing/mean_std_IHC_template.sbatch b/reproducibility/templates_processing/mean_std_IHC_template.sbatch
new file mode 100644
index 0000000..d11b91f
--- /dev/null
+++ b/reproducibility/templates_processing/mean_std_IHC_template.sbatch
@@ -0,0 +1,51 @@
+#!/bin/bash
+#SBATCH --job-name=mean-std-IHC
+#SBATCH -t 01:00:00  # usually takes ~20 min
+
+#SBATCH -p standard96s:shared  # the partition
+#SBATCH -A nim00007
+#SBATCH -c 3
+#SBATCH --mem 128G
+
+source ~/.bashrc
+# micromamba activate flamingo13
+micromamba activate sam
+
+# Run the script
+
+# SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+SCRIPT_REPO=/user/pape41/u12086/Work/my_projects/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
+DATA=$2
+# channel in the n5 folder, e.g. 0, 1, or 2
+STAIN_CHANNEL=$3
+# segmentation name, as it appears in MoBIE, e.g. IHC_v4
+SEG_NAME=$4
+
+export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
+export SEG_CLASS="ihc"
+export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"
+
+if ! [[ -d $OUTPUT_FOLDER ]] ; then
+    mkdir -p "$OUTPUT_FOLDER"
+fi
+
+echo "Input directory: ${INPUT}"
+echo "Output directory: ${OUTPUT_FOLDER}"
+
+cmd_array=( 'import sys,os;'
+            'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
+            'import unet_prediction;'
+            'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
+            'input_key=os.environ["INPUT_KEY"],'
+            'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
+cmd="${cmd_array[*]}"
+python -c "$cmd"
+
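
The three IHC steps (mean/std preprocessing, prediction, segmentation) have to run in this order. Instead of waiting manually, they can be chained with Slurm job dependencies; a sketch with hypothetical arguments:

```bash
# Chain the IHC pipeline with job dependencies; --parsable makes sbatch
# print only the job id, and afterok waits for successful completion
# (for the array job this means all ten prediction tasks).
ARGS="M_LR_000226_L GEK11L_PV_GFP_01_fused.n5 1 IHC_v4"
jid1=$(sbatch --parsable mean_std_IHC_template.sbatch $ARGS)
jid2=$(sbatch --parsable --dependency=afterok:"$jid1" apply_unet_IHC_template.sbatch $ARGS)
sbatch --dependency=afterok:"$jid2" segment_unet_IHC_template.sbatch M_LR_000226_L IHC_v4
```
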
diff --git a/reproducibility/templates_processing/mean_std_SGN_template.sbatch b/reproducibility/templates_processing/mean_std_SGN_template.sbatch
new file mode 100644
index 0000000..f9e3ee9
--- /dev/null
+++ b/reproducibility/templates_processing/mean_std_SGN_template.sbatch
@@ -0,0 +1,49 @@
+#!/bin/bash
+#SBATCH --job-name=mean-std-SGN
+#SBATCH -t 01:00:00  # usually takes ~20 min
+
+#SBATCH -p standard96s:shared  # the partition
+#SBATCH -A nim00007
+#SBATCH -c 3
+#SBATCH --mem 128G
+
+source ~/.bashrc
+micromamba activate flamingo13
+
+# Run the script
+
+SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
+DATA=$2
+# channel in the n5 folder, e.g. 0, 1, or 2
+STAIN_CHANNEL=$3
+# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2
+SEG_NAME=$4
+
+export INPUT=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
+export SEG_CLASS="sgn"
+export INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0"
+
+if ! [[ -d $OUTPUT_FOLDER ]] ; then
+    mkdir -p "$OUTPUT_FOLDER"
+fi
+
+echo "Input directory: ${INPUT}"
+echo "Output directory: ${OUTPUT_FOLDER}"
+
+cmd_array=( 'import sys,os;'
+            'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
+            'import unet_prediction;'
+            'unet_prediction.run_unet_prediction_preprocess_slurm(input_path=os.environ["INPUT"],'
+            'input_key=os.environ["INPUT_KEY"],'
+            'output_folder=os.environ["OUTPUT_FOLDER"],seg_class=os.environ["SEG_CLASS"])')
+cmd="${cmd_array[*]}"
+python -c "$cmd"
+
diff --git a/reproducibility/templates_processing/segment_unet_IHC_template.sbatch b/reproducibility/templates_processing/segment_unet_IHC_template.sbatch
new file mode 100644
index 0000000..8f37afb
--- /dev/null
+++ b/reproducibility/templates_processing/segment_unet_IHC_template.sbatch
@@ -0,0 +1,53 @@
+#!/bin/bash
+#SBATCH --job-name=segment-unet-IHC
+#SBATCH --time 35:00:00  # for gerbil up to ~30 hours
+
+#SBATCH -p standard96s:shared  # the partition
+#SBATCH -A nim00007
+#SBATCH -c 12
+#SBATCH --mem 400G
+
+source ~/.bashrc
+micromamba activate micro-sam_gpu
+
+# Print out some info.
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}"
+echo "Home directory: ${HOME}"
+echo "Working directory: $PWD"
+echo "Current node: ${SLURM_NODELIST}"
+
+# Run the script
+#python myprogram.py $SLURM_ARRAY_TASK_ID
+
+SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools
+cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# segmentation name, as it appears in MoBIE, e.g. IHC_v4
+SEG_NAME=$2
+
+export SCRIPT_DIR=$SCRIPT_REPO/scripts
+export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME"
+export MIN_SIZE=1000
+
+# v4a
+# export CENTER_DISTANCE_THRESHOLD=0.4
+# export BOUNDARY_DISTANCE_THRESHOLD=0.5
+# export DISTANCE_SMOOTHING=0
+
+# v4b
+export CENTER_DISTANCE_THRESHOLD=0.5
+export BOUNDARY_DISTANCE_THRESHOLD=0.6
+export DISTANCE_SMOOTHING=0.6
+
+echo "Output directory: ${OUTPUT_FOLDER}"
+
+cmd_array=( 'import sys,os;'
+            'sys.path.insert(0,os.environ["SCRIPT_DIR"]);'
+            'import unet_prediction;'
+            'unet_prediction.run_unet_segmentation_slurm(output_folder=os.environ["OUTPUT_FOLDER"],'
+            'min_size=os.environ["MIN_SIZE"])')
+cmd="${cmd_array[*]}"
+python -c "$cmd"
+
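
The center/boundary distance thresholds and the smoothing are only exported as environment variables here; they are presumably picked up from the environment inside run_unet_segmentation_slurm, and switching between the v4a and v4b parameter sets is done by (un)commenting the blocks above. A hypothetical invocation:

```bash
# Run the segmentation step on the predictions written by the apply step;
# this uses the v4b thresholds exported in the template.
sbatch segment_unet_IHC_template.sbatch M_LR_000226_L IHC_v4
```
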
+echo "Submitting job with sbatch from directory: ${SLURM_SUBMIT_DIR}" +echo "Home directory: ${HOME}" +echo "Working directory: $PWD" +echo "Current node: ${SLURM_NODELIST}" + +# Run the script +#python myprogram.py $SLURM_ARRAY_TASK_ID + +SCRIPT_REPO=/user/schilling40/u15000/flamingo-tools +cd "$SCRIPT_REPO"/flamingo_tools/segmentation/ || exit + +# name of cochlea, as it appears in MoBIE and the NHR +COCHLEA=$1 +# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or Calb1_SGN_v2 +SEG_NAME=$2 + +export SCRIPT_DIR=$SCRIPT_REPO/scripts +export OUTPUT_FOLDER=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SEG_NAME" +export MIN_SIZE=1000 +export BOUNDARY_DISTANCE_THRESHOLD=0.5 + +echo "Output directory: ${OUTPUT_FOLDER}" + +cmd_array=( 'import sys,os;' + 'sys.path.insert(0,os.environ["SCRIPT_DIR"]);' + 'import unet_prediction;' + 'unet_prediction.run_unet_segmentation_slurm(output_folder=os.environ["OUTPUT_FOLDER"],' + 'min_size=os.environ["MIN_SIZE"])') +cmd="${cmd_array[*]}" +python -c "$cmd" + diff --git a/reproducibility/templates_transfer/mobie_image_template.sbatch b/reproducibility/templates_transfer/mobie_image_template.sbatch new file mode 100644 index 0000000..22334d4 --- /dev/null +++ b/reproducibility/templates_transfer/mobie_image_template.sbatch @@ -0,0 +1,37 @@ +#!/bin/bash +#SBATCH --job-name=mobie_image +#SBATCH -t 01:00:00 # estimated time, adapt to your needs +#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de # change this to your mailaddress +#SBATCH --mail-type=FAIL # send mail when job begins and ends + +#SBATCH -p standard96s:shared # the partition +#SBATCH -A nim00007 +#SBATCH -c 36 +#SBATCH --mem 180G + +source ~/.bashrc +source ~/miniconda3/bin/activate +source activate mobie + +# Run the script + +# name of cochlea, as it appears in MoBIE and the NHR +COCHLEA=$1 +# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5 +DATA=$2 +# segmentation name, as it appears in MoBIE, e.g. PV or Calb1 +CHANNEL_NAME=$3 +# channel in n5 folder, e.g. 0, 1, or 2 +STAIN_CHANNEL=$4 + +MOBIE_PROJECT="/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet" +INPUT_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA" + +INPUT_KEY="setup$STAIN_CHANNEL/timepoint0/s0" +RESOLUTION="[0.38,0.38,0.38]" +SCALE_FACTORS="[[2,2,2],[2,2,2],[2,2,2],[2,2,2],[2,2,2],[2,2,2]]" +CHUNKS="[64,64,64]" + +mobie.add_image --input_path "$INPUT_PATH" --input_key "$INPUT_KEY" --root "$MOBIE_PROJECT" \ + --dataset_name "$COCHLEA" --name "$CHANNEL_NAME" --resolution "$RESOLUTION" \ + --scale_factors "$SCALE_FACTORS" --chunks "$CHUNKS" diff --git a/reproducibility/templates_transfer/mobie_segmentation_template.sbatch b/reproducibility/templates_transfer/mobie_segmentation_template.sbatch new file mode 100644 index 0000000..eb8bbf3 --- /dev/null +++ b/reproducibility/templates_transfer/mobie_segmentation_template.sbatch @@ -0,0 +1,36 @@ +#!/bin/bash +#SBATCH --job-name=mobie_segm +#SBATCH -t 01:00:00 # estimated time, adapt to your needs +#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de # change this to your mailaddress +#SBATCH --mail-type=FAIL # send mail when job begins and ends + +#SBATCH -p standard96s:shared # the partition +#SBATCH -A nim00007 +#SBATCH -c 36 +#SBATCH --mem 180G + +source ~/.bashrc +source ~/miniconda3/bin/activate +source activate mobie + +# Run the script + +# name of cochlea, as it appears in MoBIE and the NHR +COCHLEA=$1 +# data in n5 format, e.g. 
diff --git a/reproducibility/templates_transfer/mobie_segmentation_template.sbatch b/reproducibility/templates_transfer/mobie_segmentation_template.sbatch
new file mode 100644
index 0000000..eb8bbf3
--- /dev/null
+++ b/reproducibility/templates_transfer/mobie_segmentation_template.sbatch
@@ -0,0 +1,36 @@
+#!/bin/bash
+#SBATCH --job-name=mobie_segm
+#SBATCH -t 01:00:00  # estimated time, adapt to your needs
+#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de  # change this to your mail address
+#SBATCH --mail-type=FAIL  # send mail on job failure
+
+#SBATCH -p standard96s:shared  # the partition
+#SBATCH -A nim00007
+#SBATCH -c 36
+#SBATCH --mem 180G
+
+source ~/.bashrc
+source ~/miniconda3/bin/activate
+source activate mobie
+
+# Run the script
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# data in n5 format, e.g. GEK11L_PV_GFP_01_fused.n5
+DATA=$2
+# segmentation name, as it appears in MoBIE, e.g. SGN_v2 or IHC_v4
+CHANNEL_NAME=$3
+
+MOBIE_PROJECT="/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet"
+SEGMENTATION_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/"$COCHLEA"/"$DATA"
+
+SEGMENTATION_KEY="segmentation"
+
+RESOLUTION="[0.38,0.38,0.38]"
+SCALE_FACTORS="[[2,2,2],[2,2,2],[2,2,2],[2,2,2],[2,2,2],[2,2,2]]"
+CHUNKS="[64,64,64]"
+
+mobie.add_segmentation --input_path "$SEGMENTATION_PATH" --input_key "$SEGMENTATION_KEY" --root "$MOBIE_PROJECT" \
+    --dataset_name "$COCHLEA" --name "$CHANNEL_NAME" --resolution "$RESOLUTION" \
+    --scale_factors "$SCALE_FACTORS" --chunks "$CHUNKS"
diff --git a/reproducibility/templates_transfer/mobie_spots_template.sbatch b/reproducibility/templates_transfer/mobie_spots_template.sbatch
new file mode 100644
index 0000000..2cb4fba
--- /dev/null
+++ b/reproducibility/templates_transfer/mobie_spots_template.sbatch
@@ -0,0 +1,28 @@
+#!/bin/bash
+#SBATCH --job-name=mobie_spots
+#SBATCH -t 00:10:00  # estimated time, adapt to your needs
+#SBATCH --mail-user=martin.schilling@med.uni-goettingen.de  # change this to your mail address
+#SBATCH --mail-type=FAIL  # send mail on job failure
+
+#SBATCH -p standard96s:shared  # the partition
+#SBATCH -A nim00007
+#SBATCH -c 12
+#SBATCH --mem 16G
+
+source ~/.bashrc
+source ~/miniconda3/bin/activate
+source activate mobie
+
+# Run the script
+
+# name of the cochlea, as it appears in MoBIE and on the NHR
+COCHLEA=$1
+# spot source name, as it appears in MoBIE, e.g. synapses_v3 or synapses_v3_ihc_v4
+SPOT_NAME=$2
+
+MOBIE_PROJECT="/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet"
+
+TABLE_PATH=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/"$COCHLEA"/"$SPOT_NAME"/synapse_detection.tsv  # or synapse_detection_filtered.tsv
+
+mobie.add_spots --input_table "$TABLE_PATH" --root "$MOBIE_PROJECT" \
+    --dataset_name "$COCHLEA" --name "$SPOT_NAME"
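
mobie.add_spots registers the detections from the TSV table. As a sanity check before submitting, it can help to look at the table header; the columns shown below are an assumption (MoBIE spot tables are expected to provide a spot_id plus x/y/z coordinates), not a verified format of the detection output:

```bash
# Hypothetical sanity check of the spot table; the shown columns are an assumption.
head -n 2 /mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/predictions/M_LR_000226_L/synapses_v3_IHC_v4/synapse_detection.tsv
# spot_id  x        y        z
# 1        123.40   567.80   90.10
```
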
diff --git a/reproducibility/templates_transfer/s3_cochlea_template.sh b/reproducibility/templates_transfer/s3_cochlea_template.sh
new file mode 100644
index 0000000..30e4c13
--- /dev/null
+++ b/reproducibility/templates_transfer/s3_cochlea_template.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+MOBIE_DIR=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet
+# e.g. M_AMD_Runx1_L
+COCHLEA=$1
+
+export BUCKET_NAME="cochlea-lightsheet"
+export SERVICE_ENDPOINT="https://s3.fs.gwdg.de"
+mobie.add_remote_metadata -i $MOBIE_DIR -s $SERVICE_ENDPOINT -b $BUCKET_NAME
+
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA" cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"
diff --git a/reproducibility/templates_transfer/s3_object_measures_template.sh b/reproducibility/templates_transfer/s3_object_measures_template.sh
new file mode 100644
index 0000000..45375f7
--- /dev/null
+++ b/reproducibility/templates_transfer/s3_object_measures_template.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+MOBIE_DIR=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet
+SEG_TABLE=PV-SGN-v2
+SEG_S3=PV_SGN_v2
+
+COCHLEA_TABLE=M-AMD-N62-L
+COCHLEA_S3=M_AMD_N62_L
+STAINS=("Calb1" "CR")
+
+#COCHLEA_TABLE=M-AMD-Runx1-L
+#COCHLEA_S3=M_AMD_Runx1_L
+#STAINS=("CR" "Ntng1")
+
+#COCHLEA_TABLE=M-LR-000214-L
+#COCHLEA_S3=M_LR_000214_L
+#STAINS=("CR" "Calb1")
+
+for stain in "${STAINS[@]}" ; do
+    # use --dry-run for testing
+    rclone --progress copyto "$MOBIE_DIR"/tables/measurements2/"$COCHLEA_TABLE"_"$stain"_"$SEG_TABLE"_object-measures.tsv cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA_S3"/tables/"$SEG_S3"/"$stain"_"$SEG_TABLE"_object-measures.tsv
+done
+
diff --git a/reproducibility/templates_transfer/s3_seg_template.sh b/reproducibility/templates_transfer/s3_seg_template.sh
new file mode 100644
index 0000000..2b4f100
--- /dev/null
+++ b/reproducibility/templates_transfer/s3_seg_template.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+MOBIE_DIR=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet
+# e.g. M_AMD_Runx1_L
+COCHLEA=$1
+# e.g. SGN_v2
+SEG_CHANNEL=$2
+
+export BUCKET_NAME="cochlea-lightsheet"
+export SERVICE_ENDPOINT="https://s3.fs.gwdg.de"
+
+mobie.add_remote_metadata -i $MOBIE_DIR -s $SERVICE_ENDPOINT -b $BUCKET_NAME
+
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA"/dataset.json cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/dataset.json
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA"/images/ome-zarr cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/images/ome-zarr
+# note: segmentation tables containing evaluations (tonotopic mapping, marker labels, etc.) may be overwritten
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA"/tables/"$SEG_CHANNEL" cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/tables/"$SEG_CHANNEL"
+
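
Since the last copyto can overwrite curated segmentation tables on S3, a dry run first is cheap; rclone's --dry-run only lists what would be transferred:

```bash
# Preview the table transfer before overwriting remote evaluation tables.
rclone --dry-run copyto "$MOBIE_DIR"/"$COCHLEA"/tables/"$SEG_CHANNEL" \
    cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/tables/"$SEG_CHANNEL"
```
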
diff --git a/reproducibility/templates_transfer/s3_synapse_template.sh b/reproducibility/templates_transfer/s3_synapse_template.sh
new file mode 100644
index 0000000..21634b2
--- /dev/null
+++ b/reproducibility/templates_transfer/s3_synapse_template.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+MOBIE_DIR=/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet
+# e.g. G_EK_000233_L
+COCHLEA=$1
+# e.g. synapses_v3
+SYNAPSE=$2
+
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA"/dataset.json cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/dataset.json
+rclone --progress copyto "$MOBIE_DIR"/"$COCHLEA"/tables/"$SYNAPSE" cochlea-lightsheet:cochlea-lightsheet/"$COCHLEA"/tables/"$SYNAPSE"
+
diff --git a/reproducibility/tonotopic_mapping/transfer_mobie_ChReef.py b/reproducibility/tonotopic_mapping/transfer_mobie_ChReef.py
new file mode 100644
index 0000000..5fba275
--- /dev/null
+++ b/reproducibility/tonotopic_mapping/transfer_mobie_ChReef.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import os
+import subprocess
+
+
+mobie_dir = "/mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet"
+
+COCHLEAE = [  # note: re-assigned below, only the last COCHLEAE/seg block is active
+    "M_LR_000143_L",
+    "M_LR_000144_L",
+    "M_LR_000145_L",
+    "M_LR_000153_L",
+    "M_LR_000155_L",
+    "M_LR_000189_L",
+    "M_LR_000143_R",
+    "M_LR_000144_R",
+    "M_LR_000145_R",
+    "M_LR_000153_R",
+    "M_LR_000155_R",
+    "M_LR_000189_R",
+]
+seg = "SGN_v2"
+
+COCHLEAE = [
+    "M_LR_000226_L",
+    "M_LR_000227_L",
+    "M_LR_000226_R",
+    "M_LR_000227_R",
+]
+seg = "SGN_v2"
+# seg = "IHC_v4c"
+
+
+COCHLEAE = [
+    "M_AMD_N62_L",
+    "M_AMD_Runx1_L",
+    "M_LR_000099_L",
+    "M_LR_000214_L",
+]
+seg = "PV_SGN_v2"
+
+if "SGN" in seg:
+    tonotopic_dir = os.path.join(mobie_dir, "tables/tonotopic_sgn")
+elif "IHC" in seg:
+    tonotopic_dir = os.path.join(mobie_dir, "tables/tonotopic_ihc")
+else:
+    raise ValueError("Choose either a segmentation channel with 'SGN' or 'IHC'.")
+
+dry_run = False
+
+for cochlea in COCHLEAE:
+    cochlea_table = "-".join(cochlea.split("_"))
+    seg_table = "-".join(seg.split("_"))
+    in_path = os.path.join(tonotopic_dir, f"{cochlea_table}_{seg_table}.tsv")
+    out_path = f"cochlea-lightsheet:cochlea-lightsheet/{cochlea}/tables/{seg}/default.tsv"
+    print(out_path)
+
+    # set dry_run = True above to preview the transfer without copying
+    subprocess.run(["rclone", "copyto", in_path, out_path] + (["--dry-run"] if dry_run else []), check=True)
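
For one cochlea of the active batch above, the loop issues a command of the following shape. Note how underscores in the cochlea and segmentation names are mapped to dashes for the local table filename, while the S3 target keeps the underscores and is always written as default.tsv:

```bash
# Effective command for M_AMD_N62_L with seg = "PV_SGN_v2" (illustration).
rclone copyto \
    /mnt/vast-nhr/projects/nim00007/data/moser/cochlea-lightsheet/mobie_project/cochlea-lightsheet/tables/tonotopic_sgn/M-AMD-N62-L_PV-SGN-v2.tsv \
    cochlea-lightsheet:cochlea-lightsheet/M_AMD_N62_L/tables/PV_SGN_v2/default.tsv
```
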
"default.tsv") + tsv_path, fs = get_s3_path(s3_path, bucket_name=BUCKET_NAME, + service_endpoint=SERVICE_ENDPOINT) + with fs.open(tsv_path, 'r') as f: + ihc_table = pd.read_csv(f, sep="\t") + + # synapse_table + syn_path = os.path.join(syn_per_ihc_dir, f"ihc_count_{cochlea}.tsv") + with open(syn_path, 'r') as f: + syn_table = pd.read_csv(f, sep="\t") + + syn_per_IHC = [-1 for _ in range(len(ihc_table))] + ihc_table.loc[:, "syn_per_IHC"] = syn_per_IHC + + ihc_table.loc[ihc_table['component_labels'].isin(component_list), 'syn_per_IHC'] = 0 + zero_syn = ihc_table[ihc_table["syn_per_IHC"] == 0] + print(f"Total IHC in component: {len(zero_syn)}") + + for label_id, syn_count in zip(list(syn_table["label_id"]), list(syn_table["synapse_count"])): + ihc_table.loc[ihc_table["label_id"] == label_id, "syn_per_IHC"] = syn_count + zero_syn = ihc_table[ihc_table["syn_per_IHC"] > syn_limit] + print(f"IHC in component with more than 25 synapses: {len(zero_syn)}") + zero_syn = ihc_table[ihc_table["syn_per_IHC"] == 0] + print(f"IHC in component without synapses: {len(zero_syn)}") + + syn_per_IHC = list(ihc_table.loc[ihc_table['component_labels'].isin(component_list), 'syn_per_IHC']) + + if args.ihc_syn: + syn_per_IHC = [s for s in syn_per_IHC if s != 0] + + print(f"Mean syn_per_IHC: {round(sum(syn_per_IHC) / len(syn_per_IHC), 2)}") + print(f"Stdv syn_per_IHC: {round(np.std(syn_per_IHC), 2)}") + out_path = os.path.join(out_dir, cochlea + "_syn-per-ihc.tsv") + ihc_table.to_csv(out_path, sep="\t", index=False) + + +def main(): + + parser = argparse.ArgumentParser() + parser.add_argument("-c", "--cochlea", type=str, nargs="+", default=COCHLEAE, help="Cochlea(e) to process.") + parser.add_argument("-o", "--output_folder", type=str, default=None, help="Path to output folder.") + parser.add_argument("-s", "--seg_version", type=str, default=None, help="Path to output folder.") + parser.add_argument("--ihc_syn", action="store_true", help="Consider only IHC with synapses.") + parser.add_argument("--component_list", type=int, nargs="+", default=None, + help="List of IHC components.") + + args = parser.parse_args() + + add_syn_per_ihc(args) + + +if __name__ == "__main__": + main()