
Commit d29de78

feat: more seg channels

1 parent 9c356aa  commit d29de78

File tree

1 file changed: +40 −24 lines changed


examples/load_tiff_seg.py

Lines changed: 40 additions & 24 deletions
@@ -1,5 +1,6 @@
 # %% Import packages
 
+from copy import copy
 import itertools
 from pathlib import Path
 from bfio import BioReader
@@ -60,12 +61,11 @@
 
 num_channels = br.shape[-1]
 data_type = "uint16"
-chunk_size = [256, 256, 128]
 volume_size = [br.shape[1], br.shape[0], br.shape[2]] # XYZ
 
 # %% Setup somewhere to hold progress
+CHANNEL = 0
 progress_dir = OUTPUT_PATH / "progress"
-progress_dir.mkdir(exist_ok=True)
 
 # %% Functions for moving data
 shape = np.array([br.shape[1], br.shape[0], br.shape[2]])
@@ -79,7 +79,13 @@ def chunked_reader(x_i, y_i, z_i):
     z_start, z_end = z_i * chunk_shape[2], min((z_i + 1) * chunk_shape[2], shape[2])
 
     # Read the chunk from the BioReader
-    chunk = br.read(X=(x_start, x_end), Y=(y_start, y_end), Z=(z_start, z_end))
+    chunk = br.read(
+        X=(x_start, x_end), Y=(y_start, y_end), Z=(z_start, z_end), C=(CHANNEL,)
+    )
+    chunk = np.atleast_3d(chunk)
+    # Remove the last flattened dimension
+    if (len(chunk.shape) > 3) and (chunk.shape[-1] == 1):
+        chunk = chunk[:, :, :, 0]
 
     # Return the chunk
     return chunk.swapaxes(0, 1)
@@ -99,21 +105,22 @@ def process(args):
     )
     if f_name.exists() and not OVERWRITE:
         return
-    print("Working on", f_name)
     rawdata = chunked_reader(x_i, y_i, z_i)
     # TEMP
-    print(np.unique(rawdata))
     print(rawdata.shape)
     # Create the segmentation mask
-    dimensions = [start, end]
-    seg_chunk = create_segmentation_chunk(rawdata, dimensions, convert_non_zero_to=None)
+    # The writer expects the data to be in ZYX order so need to swap the axes
+    start_zyx = [start[2], start[1], start[0]]
+    end_zyx = [end[2], end[1], end[0]]
+    dimensions = [start_zyx, end_zyx]
+    seg_chunk = create_segmentation_chunk(rawdata, dimensions, convert_non_zero_to=1)
     seg_chunk.write_to_directory(OUTPUT_PATH / "data")
     touch(f_name)
 
 
 # %% Try with a single chunk to see if it works
-x_i, y_i, z_i = 0, 0, 0
-process((x_i, y_i, z_i))
+# x_i, y_i, z_i = 0, 0, 0
+# process((x_i, y_i, z_i))
 
 # %% Loop over all the chunks
 coords = itertools.product(
@@ -126,34 +133,43 @@ def process(args):
 reversed_coords.reverse()
 
 # %% Create the metadata
+int_size = [int(size_x), int(size_y), int(size_z)]
+real_size = [size_x, size_y, size_z]
 metadata = {
     "@type": "neuroglancer_multiscale_volume",
     "data_type": "uint32",
     "num_channels": 1,
     "scales": [
         {
-            "chunk_sizes": [chunk_size],
+            "chunk_sizes": [[int(c) for c in chunk_shape]],
             "encoding": "compressed_segmentation",
-            "copmressed_segmentation_block_size": [8, 8, 8],
-            "resolution": [size_x, size_y, size_z],
+            "compressed_segmentation_block_size": [8, 8, 8],
+            "resolution": int_size,
             "key": "data",
             "size": volume_size,
         }
     ],
     "mesh": "mesh",
     "type": "segmentation",
 }
-write_metadata(
-    metadata,
-    OUTPUT_PATH,
-    overwrite=OVERWRITE,
-)
-
-# %% Now create the mesh
-mesh_shape = np.array(volume_size)
-generate_multiresolution_mesh_from_segmentation(OUTPUT_PATH, "mesh", 3, mesh_shape)
-
 
 # %% Move the data across with a single worker
-for coord in reversed_coords:
-    process(coord)
+original_path = copy(OUTPUT_PATH)
+for i in range(0, 1):
+    OUTPUT_PATH = original_path.with_stem(f"{original_path.stem}_ch{i}")
+    OUTPUT_PATH.mkdir(exist_ok=True, parents=True)
+    print("Writing to ", OUTPUT_PATH)
+    # Make the progress directory
+    progress_dir = OUTPUT_PATH / "progress"
+    progress_dir.mkdir(exist_ok=True)
+    CHANNEL = i
+    write_metadata(metadata, OUTPUT_PATH)
+    for coord in reversed_coords:
+        process(coord)
+    print("COMPLETED", i)
+    mesh_shape = np.array(volume_size)
+    generate_multiresolution_mesh_from_segmentation(
+        OUTPUT_PATH, "mesh", 2, mesh_shape, fill_missing=True
+    )
+
+# %% For running all
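Note on the channel loop: as committed, the loop only writes channel 0 (range(0, 1)). A minimal sketch of how the same per-channel pattern could be extended to every channel, assuming num_channels (already computed from br.shape[-1]) and the existing process, write_metadata, metadata, and reversed_coords are in scope; the range(num_channels) bound is an illustration, not part of this commit:

# Sketch only: iterate every channel rather than just channel 0 (assumption, not in this commit)
original_path = copy(OUTPUT_PATH)
for i in range(num_channels):  # num_channels = br.shape[-1], computed earlier in the script
    OUTPUT_PATH = original_path.with_stem(f"{original_path.stem}_ch{i}")
    OUTPUT_PATH.mkdir(exist_ok=True, parents=True)
    progress_dir = OUTPUT_PATH / "progress"
    progress_dir.mkdir(exist_ok=True)
    CHANNEL = i  # chunked_reader() reads this module-level variable when calling br.read(..., C=(CHANNEL,))
    write_metadata(metadata, OUTPUT_PATH)
    for coord in reversed_coords:
        process(coord)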

0 commit comments
