Commit de5bb05

feat: v1 of ome.tiff large image loading
1 parent f2c0794 commit de5bb05

1 file changed: +111 -0 lines changed

examples/load_tiff.py

Lines changed: 111 additions & 0 deletions
@@ -0,0 +1,111 @@
import argparse
import itertools
import webbrowser
import neuroglancer
import neuroglancer.cli
from pathlib import Path
from bfio import BioReader
import dask
import dask.array
import numpy as np

HERE = Path(__file__).parent

FILEPATH = Path("x.ome.tiff")


def add_image_layer(state, path, name="image"):
    br = BioReader(str(path), backend="bioformats")
    # Chunk size for lazy reads, in (X, Y, Z, C) order.
    chunk_shape = np.array([256, 256, 128, 1])
    shape = np.array(br.shape)
    num_chunks_per_dim = np.ceil(shape / chunk_shape).astype(int)
    padded_chunk_shape = num_chunks_per_dim * chunk_shape

    def chunked_reader(x_i, y_i, z_i, c):
        # Convert chunk indices into voxel ranges, clamped to the image bounds.
        x_start, x_end = x_i * chunk_shape[0], min((x_i + 1) * chunk_shape[0], shape[0])
        y_start, y_end = y_i * chunk_shape[1], min((y_i + 1) * chunk_shape[1], shape[1])
        z_start, z_end = z_i * chunk_shape[2], min((z_i + 1) * chunk_shape[2], shape[2])

        # Read the chunk from the BioReader
        chunk = br.read(
            X=(x_start, x_end), Y=(y_start, y_end), Z=(z_start, z_end), C=(c,)
        )
        # Extend the chunk to be X, Y, Z, 1 not just X, Y, Z
        chunk = np.expand_dims(chunk, axis=-1)
        # If the chunk is smaller than the full chunk shape (it touches an
        # image boundary), zero-pad it up to chunk_shape.
        if chunk.shape != tuple(chunk_shape):
            padded_chunk = np.zeros(chunk_shape, dtype=chunk.dtype)
            padded_chunk[: chunk.shape[0], : chunk.shape[1], : chunk.shape[2], :] = (
                chunk
            )
            return padded_chunk
        return chunk

    def chunk_size(x_i, y_i, z_i, c):
        # Unpadded extent of a chunk (currently unused, see the commented-out
        # chunk_sizes computation below).
        x_start, x_end = x_i * chunk_shape[0], min((x_i + 1) * chunk_shape[0], shape[0])
        y_start, y_end = y_i * chunk_shape[1], min((y_i + 1) * chunk_shape[1], shape[1])
        z_start, z_end = z_i * chunk_shape[2], min((z_i + 1) * chunk_shape[2], shape[2])

        return (x_end - x_start, y_end - y_start, z_end - z_start, 1)

    lazy_reader = dask.delayed(chunked_reader)
    lazy_chunks = [
        lazy_reader(x, y, z, c)
        for x, y, z, c in itertools.product(*[range(i) for i in num_chunks_per_dim])
    ]
    # chunk_sizes = [
    #     chunk_size(x, y, z, c)
    #     for x, y, z, c in itertools.product(*[range(i) for i in num_chunks_per_dim])
    # ]
    # Load the first chunk eagerly to get the dtype and (padded) chunk shape;
    # assume the rest are the same.
    sample = lazy_chunks[0].compute()
    arrays = [
        dask.array.from_delayed(lazy_chunk, dtype=sample.dtype, shape=sample.shape)
        for lazy_chunk in lazy_chunks
    ]
    x = dask.array.concatenate(arrays)
    print(x.shape, shape, np.prod(x.shape), np.prod(padded_chunk_shape))
    # TODO: the chunks are currently stacked along the first axis; they still
    # need to be rearranged into the padded volume shape, roughly
    # x.reshape(padded_chunk_shape) done axis by axis.
    scales = [1, 1, 1, 1]
    dimensions = neuroglancer.CoordinateSpace(
        names=["x", "y", "z", "c"], units="um", scales=scales
    )
    local_volume = neuroglancer.LocalVolume(x, dimensions)
    state.layers.append(
        name=name,
        layer=neuroglancer.ImageLayer(
            source=local_volume,
            volume_rendering_mode="ON",
            volume_rendering_depth_samples=400,
        ),
        shader="""
#uicontrol invlerp normalized
void main() {
  float val = normalized();
  emitRGBA(vec4(val, val, val, val));
}
""",
    )
    state.layout = "3d"


def launch_nglancer():
    ap = argparse.ArgumentParser()
    neuroglancer.cli.add_server_arguments(ap)
    args = ap.parse_args()
    neuroglancer.cli.handle_server_arguments(args)
    viewer = neuroglancer.Viewer()
    return viewer


def main():
    viewer = launch_nglancer()
    with viewer.txn() as s:
        add_image_layer(s, FILEPATH, "image")
    webbrowser.open_new(viewer.get_viewer_url())


if __name__ == "__main__":
    main()
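
Note on the TODO near the end of add_image_layer: the flat dask.array.concatenate leaves the chunks stacked along the first axis rather than arranged as an (X, Y, Z, C) volume. Below is a minimal sketch of one way to finish that step; it is not part of this commit. It reuses the example's lazy_reader, chunk_shape, num_chunks_per_dim, shape, and sample, and the helper name assemble_volume is made up for illustration.

def assemble_volume(lazy_reader, chunk_shape, num_chunks_per_dim, shape, sample):
    # Hypothetical helper (not in the commit): build an (X, Y, Z, C) dask array
    # from the padded per-chunk delayed reads using dask.array.block.
    nx, ny, nz, nc = (int(n) for n in num_chunks_per_dim)
    # dask.array.block expects a nested list whose nesting depth equals the
    # number of dimensions; blocks[x][y][z][c] is one padded chunk.
    blocks = [
        [
            [
                [
                    dask.array.from_delayed(
                        lazy_reader(x, y, z, c),
                        shape=tuple(int(s) for s in chunk_shape),
                        dtype=sample.dtype,
                    )
                    for c in range(nc)
                ]
                for z in range(nz)
            ]
            for y in range(ny)
        ]
        for x in range(nx)
    ]
    # Result has shape num_chunks_per_dim * chunk_shape (the padded shape).
    volume = dask.array.block(blocks)
    # Crop the zero padding off so the array matches the real image extent.
    return volume[: shape[0], : shape[1], : shape[2], : shape[3]]

Since every padded chunk has the same shape, dask.array.block can assemble them directly; the cropped result could then replace x when constructing neuroglancer.LocalVolume.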
