1919 "/media/starfish/Storage/metacell/converted/Isl1-GFP_E13-5_F129-3_CMN-R-L_02052024-GLC-stitched"
2020)
2121OUTPUT_PATH .mkdir (exist_ok = True , parents = True )
22+ OVERWRITE = False
2223
2324# %% Load the data
2425br = BioReader (str (FILEPATH ), backend = "bioformats" )
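OVERWRITE is consumed further down in process(): finished chunks leave a .done marker, and keeping the flag False lets an interrupted conversion resume without rewriting them. The copy loop also relies on bfio's BioReader reading sub-regions lazily, so the stitched volume never has to fit in memory. A minimal sketch of that read pattern, using br as defined above (the region bounds are hypothetical):

    print(br.shape)  # (Y, X, Z, C) for this file -- see the volume_size fix below
    # a 256 x 256 x 1 region of channel 0; nothing outside it is loaded
    tile = br.read(X=(0, 256), Y=(0, 256), Z=(0, 1), C=(0,))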
@@ -54,6 +55,7 @@
 num_channels = br.shape[-1]
 data_type = "uint16"
 chunk_size = [256, 256, 128, 1]
+volume_size = [br.shape[1], br.shape[0], br.shape[2]]  # XYZ

 # %% Setup the cloudvolume info
 info = CloudVolume.create_new_info(
@@ -64,7 +66,7 @@
     resolution=[size_x, size_y, size_z],
     voxel_offset=[0, 0, 0],
     chunk_size=chunk_size[:-1],
-    volume_size=[br.shape[0], br.shape[1], br.shape[2]],
+    volume_size=volume_size,
 )
 vol = CloudVolume("file://" + str(OUTPUT_PATH), info=info)
 vol.provenance.description = "Example data conversion"
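The substantive fix in this hunk: CloudVolume wants volume_size in X, Y, Z order, while br.shape is Y-first, so the old line silently swapped the X and Y extents. For reference, the four folded lines above presumably supply the remaining required fields; a sketch of the full call, using CloudVolume's documented keywords and this script's values:

    info = CloudVolume.create_new_info(
        num_channels=num_channels,
        layer_type="image",          # grayscale microscopy, not a segmentation
        data_type=data_type,         # "uint16", set above
        encoding="raw",
        resolution=[size_x, size_y, size_z],  # physical voxel size
        voxel_offset=[0, 0, 0],
        chunk_size=chunk_size[:-1],  # spatial dims only: [256, 256, 128]
        volume_size=volume_size,     # X, Y, Z
    )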
@@ -77,7 +79,7 @@


 # %% Functions for moving data
-shape = np.array(br.shape)
+shape = np.array([br.shape[1], br.shape[0], br.shape[2], br.shape[3]])
 chunk_shape = np.array([1024, 1024, 512, 1])  # this is for reading data
 num_chunks_per_dim = np.ceil(shape / chunk_shape).astype(int)

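shape is reordered here for the same reason as volume_size: the chunk bookkeeping below indexes axis 0 as X. num_chunks_per_dim is then a per-axis ceiling division over the read-side block size; a quick worked example with a hypothetical extent:

    import numpy as np

    shape = np.array([2048, 1536, 600, 2])        # hypothetical X, Y, Z, C extent
    chunk_shape = np.array([1024, 1024, 512, 1])  # read-side block size
    print(np.ceil(shape / chunk_shape).astype(int))  # [2 2 2 2] -> 16 read jobs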
@@ -88,24 +90,34 @@ def chunked_reader(x_i, y_i, z_i, c):
     z_start, z_end = z_i * chunk_shape[2], min((z_i + 1) * chunk_shape[2], shape[2])

     # Read the chunk from the BioReader
-    chunk = br.read(X=(x_start, x_end), Y=(y_start, y_end), Z=(z_start, z_end), C=(c,))
-    return np.expand_dims(chunk, axis=-1)
+    chunk = br.read(
+        X=(x_start, x_end), Y=(y_start, y_end), Z=(z_start, z_end), C=(c,)
+    )
+    # Keep expanding dims until it is the same length as chunk_shape
+    while len(chunk.shape) < len(chunk_shape):
+        chunk = np.expand_dims(chunk, axis=-1)
+    # Return the chunk, swapped from Y-first to X-first order
+    return chunk.swapaxes(0, 1)


 def process(args):
     x_i, y_i, z_i, c = args
-    rawdata = chunk = chunked_reader(x_i, y_i, z_i, c)
     start = [x_i * chunk_shape[0], y_i * chunk_shape[1], z_i * chunk_shape[2]]
     end = [
         min((x_i + 1) * chunk_shape[0], shape[0]),
         min((y_i + 1) * chunk_shape[1], shape[1]),
         min((z_i + 1) * chunk_shape[2], shape[2]),
     ]
-    vol[start[0] : end[0], start[1] : end[1], start[2] : end[2], c] = rawdata
-    touch(
+    f_name = (
         progress_dir
         / f"{start[0]}-{end[0]}_{start[1]}-{end[1]}_{start[2]}-{end[2]}_{c}.done"
     )
+    if f_name.exists() and not OVERWRITE:
+        return
+    print("Working on", f_name)
+    rawdata = chunked_reader(x_i, y_i, z_i, c)
+    vol[start[0] : end[0], start[1] : end[1], start[2] : end[2], c] = rawdata
+    touch(f_name)


 # %% Try with a single chunk to see if it works
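Two things happen in this hunk. In chunked_reader, the array br.read returns is Y-first and can come back with trailing singleton axes squeezed out (the single channel, for instance), so the new code pads it back to 4-D and swaps axes 0 and 1 to match CloudVolume's X-first indexing. In essence:

    import numpy as np

    chunk = np.zeros((1024, 512, 128))  # shaped like a (Y, X, Z) read with C squeezed out
    while chunk.ndim < 4:               # pad back to (Y, X, Z, C)
        chunk = np.expand_dims(chunk, axis=-1)
    print(chunk.swapaxes(0, 1).shape)   # (512, 1024, 128, 1) -- X-first, as vol expects

In process, the .done marker path is now computed before any data is read, so an already-finished chunk returns immediately unless OVERWRITE is set. That ordering is what makes the conversion resumable: a rerun fast-forwards through completed chunks instead of re-reading and re-uploading them.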
@@ -122,6 +134,9 @@ def process(args):
     range(num_chunks_per_dim[2]),
     range(num_channels),
 )
+# Do it in reverse order because the last chunks are most likely to error
+reversed_coords = list(coords)
+reversed_coords.reverse()

 # %% Move the data across with multiple workers
 # max_workers = 8
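One caveat with the reversal: assuming coords is the itertools.product iterator built just above, it is single-use, and list(coords) exhausts it. The commented-out parallel path below still maps over coords and would therefore iterate over nothing if re-enabled as-is; it needs reversed_coords (or a fresh product(...)). A sketch of that variant under the same names:

    from concurrent.futures import ProcessPoolExecutor

    max_workers = 8
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        # drain the iterator so worker exceptions surface here
        list(executor.map(process, reversed_coords))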
@@ -130,7 +145,7 @@ def process(args):
 #     executor.map(process, coords)

 # %% Move the data across with a single worker
-for coord in coords:
+for coord in reversed_coords:
     process(coord)

 # %% Serve the dataset to be used in neuroglancer
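The serving cell itself is cut off in this view. For a file:// layer like this one, CloudVolume's built-in development server is one common way to expose the data to neuroglancer; a sketch, with an arbitrary port:

    vol.viewer(port=1337)
    # then add the layer in neuroglancer as:
    #   precomputed://http://localhost:1337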