11import argparse
2- import sys , os
2+ import os
33
44import multiprocessing as mp
55from concurrent import futures
66
77import imageio .v3 as imageio
8- import numpy as np
98import nifty .tools as nt
109from tqdm import tqdm
1110
1211from elf .wrapper .resized_volume import ResizedVolume
1312from elf .io import open_file
1413
14+
1515def main (input_path , output_folder , scale , input_key , interpolation_order ):
16- input_ = open_file (input_path , "r" )[input_key ]
16+ if input_path .endswith (".tif" ):
17+ input_ = imageio .imread (input_path )
18+ input_chunks = (128 ,) * 3
19+ else :
20+ input_ = open_file (input_path , "r" )[input_key ]
21+ input_chunks = input_ .chunks
1722
1823 abs_path = os .path .abspath (input_path )
1924 basename = "" .join (os .path .basename (abs_path ).split ("." )[:- 1 ])
@@ -33,17 +38,20 @@ def main(input_path, output_folder, scale, input_key, interpolation_order):
3338 resized_volume = ResizedVolume (input_ , new_shape , order = interpolation_order )
3439
3540 output = open_file (output_path , mode = "a" )
36- dataset = output .create_dataset (input_key , shape = new_shape , dtype = input_ .dtype , chunks = input_ .chunks , compression = "gzip" )
37- blocking = nt .blocking ([0 ] * ndim , new_shape , input_ .chunks )
41+ output_dataset = output .create_dataset (
42+ input_key , shape = new_shape , dtype = input_ .dtype ,
43+ chunks = input_chunks , compression = "gzip"
44+ )
45+ blocking = nt .blocking ([0 ] * ndim , new_shape , input_chunks )
3846
3947 def copy_chunk (block_index ):
40- block = blocking .getBlock (block_index )
41- volume_index = tuple (slice (begin , end ) for (begin , end ) in zip (block .begin , block .end ))
42- data = resized_volume [volume_index ]
43- output [volume_index ] = data
48+ block = blocking .getBlock (block_index )
49+ volume_index = tuple (slice (begin , end ) for (begin , end ) in zip (block .begin , block .end ))
50+ data = resized_volume [volume_index ]
51+ output_dataset [volume_index ] = data
4452
4553 with futures .ThreadPoolExecutor (n_threads ) as resize_pool :
46- list (tqdm (resize_pool .map (copy_chunk , range (blocking .numberOfBlocks )), total = blocking .numberOfBlocks ))
54+ list (tqdm (resize_pool .map (copy_chunk , range (blocking .numberOfBlocks )), total = blocking .numberOfBlocks ))
4755
4856
4957if __name__ == "__main__" :
@@ -52,12 +60,14 @@ def copy_chunk(block_index):
5260 description = "Script for resizing microscopy data in n5 format." )
5361
5462 parser .add_argument ('input_file' , type = str , help = "Input file" )
55- parser .add_argument ('output_folder' , type = str , help = "Output folder. Default resized output is <basename>_resized.n5" )
63+ parser .add_argument (
64+ 'output_folder' , type = str , help = "Output folder. Default resized output is <basename>_resized.n5"
65+ )
5666
5767 parser .add_argument ('-s' , "--scale" , type = float , default = 0.38 , help = "Scale of input. Re-scaled to 1." )
5868 parser .add_argument ('-k' , "--input_key" , type = str , default = "setup0/timepoint0/s0" , help = "Input key for n5 file." )
5969 parser .add_argument ('-i' , "--interpolation_order" , type = float , default = 3 , help = "Interpolation order." )
6070
6171 args = parser .parse_args ()
6272
63- main (args .input_file , args .output_folder , args .scale , args .input_key , args .interpolation_order )
73+ main (args .input_file , args .output_folder , args .scale , args .input_key , args .interpolation_order )
0 commit comments