Skip to content

Commit 41067ef

Browse files
Fix error in cochlea rescaling
1 parent 40f0813 commit 41067ef

File tree

1 file changed

+22
-12
lines changed

1 file changed

+22
-12
lines changed

scripts/resize_wrongly_scaled_cochleas.py

Lines changed: 22 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,24 @@
11
import argparse
2-
import sys, os
2+
import os
33

44
import multiprocessing as mp
55
from concurrent import futures
66

77
import imageio.v3 as imageio
8-
import numpy as np
98
import nifty.tools as nt
109
from tqdm import tqdm
1110

1211
from elf.wrapper.resized_volume import ResizedVolume
1312
from elf.io import open_file
1413

14+
1515
def main(input_path, output_folder, scale, input_key, interpolation_order):
16-
input_ = open_file(input_path, "r")[input_key]
16+
if input_path.endswith(".tif"):
17+
input_ = imageio.imread(input_path)
18+
input_chunks = (128,) * 3
19+
else:
20+
input_ = open_file(input_path, "r")[input_key]
21+
input_chunks = input_.chunks
1722

1823
abs_path = os.path.abspath(input_path)
1924
basename = "".join(os.path.basename(abs_path).split(".")[:-1])
@@ -33,17 +38,20 @@ def main(input_path, output_folder, scale, input_key, interpolation_order):
3338
resized_volume = ResizedVolume(input_, new_shape, order=interpolation_order)
3439

3540
output = open_file(output_path, mode="a")
36-
dataset = output.create_dataset(input_key, shape=new_shape, dtype = input_.dtype, chunks=input_.chunks, compression="gzip")
37-
blocking = nt.blocking([0] * ndim, new_shape, input_.chunks)
41+
output_dataset = output.create_dataset(
42+
input_key, shape=new_shape, dtype=input_.dtype,
43+
chunks=input_chunks, compression="gzip"
44+
)
45+
blocking = nt.blocking([0] * ndim, new_shape, input_chunks)
3846

3947
def copy_chunk(block_index):
40-
block = blocking.getBlock(block_index)
41-
volume_index = tuple(slice(begin, end) for (begin, end) in zip(block.begin, block.end))
42-
data = resized_volume[volume_index]
43-
output[volume_index] = data
48+
block = blocking.getBlock(block_index)
49+
volume_index = tuple(slice(begin, end) for (begin, end) in zip(block.begin, block.end))
50+
data = resized_volume[volume_index]
51+
output_dataset[volume_index] = data
4452

4553
with futures.ThreadPoolExecutor(n_threads) as resize_pool:
46-
list(tqdm(resize_pool.map(copy_chunk, range(blocking.numberOfBlocks)), total=blocking.numberOfBlocks))
54+
list(tqdm(resize_pool.map(copy_chunk, range(blocking.numberOfBlocks)), total=blocking.numberOfBlocks))
4755

4856

4957
if __name__ == "__main__":
@@ -52,12 +60,14 @@ def copy_chunk(block_index):
5260
description="Script for resizing microscoopy data in n5 format.")
5361

5462
parser.add_argument('input_file', type=str, help="Input file")
55-
parser.add_argument('output_folder', type=str, help="Output folder. Default resized output is <basename>_resized.n5")
63+
parser.add_argument(
64+
'output_folder', type=str, help="Output folder. Default resized output is <basename>_resized.n5"
65+
)
5666

5767
parser.add_argument('-s', "--scale", type=float, default=0.38, help="Scale of input. Re-scaled to 1.")
5868
parser.add_argument('-k', "--input_key", type=str, default="setup0/timepoint0/s0", help="Input key for n5 file.")
5969
parser.add_argument('-i', "--interpolation_order", type=float, default=3, help="Interpolation order.")
6070

6171
args = parser.parse_args()
6272

63-
main(args.input_file, args.output_folder, args.scale, args.input_key, args.interpolation_order)
73+
main(args.input_file, args.output_folder, args.scale, args.input_key, args.interpolation_order)

0 commit comments

Comments (0)