 import time
 import traceback
 
+from filelock import FileLock
 import numpy as np
 
 from ouroboros.helpers.memory_usage import (
@@ -32,7 +33,7 @@
     join_path,
     generate_tiff_write,
     write_conv_vol,
-    write_small_intermediate
+    write_raw_intermediate
 )
 from ouroboros.helpers.shapes import DataRange, ImgSliceC
 
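Note: FileLock comes from the third-party filelock package; it is used further down to serialize appends to the per-Z intermediate files across worker processes. A minimal sketch of that pattern, assuming the package is installed (file names here are illustrative only, not the project's actual paths):

from filelock import FileLock

with FileLock("i_00001.lock"):               # blocks until no other process holds the lock
    with open("i_00001.dat", "ab") as f:     # appends from concurrent processes stay serialized
        f.write(b"record bytes")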
@@ -127,19 +128,16 @@ def _process(self, input_data: any) -> tuple[any, None] | tuple[None, any]:
 
         straightened_volume_path = new_straightened_volume_path
 
-        # Write huge temp files (need to address)
         full_bounding_box = BoundingBox.bound_boxes(volume_cache.bounding_boxes)
         write_shape = np.flip(full_bounding_box.get_shape()).tolist()
-        print(f"\nFront Projection Shape: {FPShape}")
-        print(f"\nBack Projection Shape (Z/Y/X): {write_shape}")
 
         pipeline_input.output_file_path = (f"{config.output_file_name}_"
                                            f"{'_'.join(map(str, full_bounding_box.get_min(np.uint32)))}")
         folder_path = Path(config.output_file_folder, pipeline_input.output_file_path)
         folder_path.mkdir(exist_ok=True, parents=True)
 
-        i_path = Path(config.output_file_folder,
-                      f"{config.output_file_name}_t_{'_'.join(map(str, full_bounding_box.get_min(np.uint32)))}")
+        # Intermediate path
+        i_path = Path(config.output_file_folder, f"{os.getpid()}_{config.output_file_name}")
 
         if config.make_single_file:
             is_big_tiff = calculate_gigabytes_from_dimensions(
@@ -189,12 +187,12 @@ def _process(self, input_data: any) -> tuple[any, None] | tuple[None, any]:
             for chunk, _, chunk_rects, _, index in chunk_range.get_iter(chunk_iter):
                 bp_futures.append(executor.submit(
                     process_chunk,
-                    config,
-                    straightened_volume_path,
-                    chunk_rects,
-                    chunk,
-                    index,
-                    full_bounding_box
+                    config=config,
+                    straightened_volume_path=straightened_volume_path,
+                    chunk_rects=chunk_rects,
+                    chunk=chunk,
+                    index=index,
+                    full_bounding_box=full_bounding_box
                 ))
 
             # Track what's written.
@@ -206,8 +204,8 @@ def _process(self, input_data: any) -> tuple[any, None] | tuple[None, any]:
             def note_written(write_future):
                 nonlocal pages_written
                 pages_written += 1
-                self.update_progress((np.sum(processed) / len(chunk_range)) * (2 / 3)
-                                     + (pages_written / num_pages) * (1 / 3))
+                self.update_progress((np.sum(processed) / len(chunk_range)) * (exec_procs / self.num_processes)
+                                     + (pages_written / num_pages) * (write_procs / self.num_processes))
                 for key, value in write_future.result().items():
                     self.add_timing(key, value)
 
@@ -222,8 +220,8 @@ def note_written(write_future):
 
                 # Update the progress bar
                 processed[index] = 1
-                self.update_progress((np.sum(processed) / len(chunk_range)) * (2 / 3)
-                                     + (pages_written / num_pages) * (1 / 3))
+                self.update_progress((np.sum(processed) / len(chunk_range)) * (exec_procs / self.num_processes)
+                                     + (pages_written / num_pages) * (write_procs / self.num_processes))
 
                 update_writable_rects(processed, slice_rects, min_dim, writeable, DEFAULT_CHUNK_SIZE)
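The two update_progress calls above replace the fixed 2/3 + 1/3 weighting with weights proportional to how many worker processes handle chunk processing versus page writing. A minimal sketch of that arithmetic, assuming exec_procs and write_procs together make up self.num_processes (the names come from this diff; how the split is chosen is configured elsewhere):

def overall_progress(chunks_done, total_chunks, pages_done, total_pages,
                     exec_procs, write_procs):
    # Each stage contributes in proportion to the share of processes it owns.
    num_processes = exec_procs + write_procs
    return (chunks_done / total_chunks) * (exec_procs / num_processes) \
        + (pages_done / total_pages) * (write_procs / num_processes)

# With exec_procs=8 and write_procs=4 this reproduces the old fixed split:
# chunk work drives 2/3 of the bar and page writing the remaining 1/3.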
229227
@@ -233,14 +231,14 @@ def note_written(write_future):
233231 for index in write :
234232 write_futures .append (write_executor .submit (
235233 write_conv_vol ,
236- tif_write (tifffile .imwrite ),
237- i_path .joinpath (f"i_{ index :05} " ),
238- ImgSliceC (* write_shape [1 :], channels ),
239- bool if config .make_backprojection_binary else np .uint16 ,
240- scaling_factors ,
241- folder_path ,
242- index ,
243- config .upsample_order
234+ writer = tif_write (tifffile .imwrite ),
235+ source_path = i_path .joinpath (f"i_{ index :05} .dat " ),
236+ shape = ImgSliceC (* write_shape [1 :], channels ),
237+ dtype = bool if config .make_backprojection_binary else np .uint16 ,
238+ scaling = scaling_factors ,
239+ target_folder = folder_path ,
240+ index = index ,
241+ interpolation = config .upsample_order
244242 ))
245243 write_futures [- 1 ].add_done_callback (note_written )
246244
@@ -271,8 +269,7 @@ def note_written(write_future):
 
         if config.make_single_file:
             shutil.rmtree(folder_path)
-            shutil.rmtree(Path(config.output_file_folder,
-                               f"{config.output_file_name}_t_{'_'.join(map(str, full_bounding_box.get_min(np.uint32)))}"))
+            shutil.rmtree(i_path)
 
         return None
 
@@ -320,7 +317,7 @@ def process_chunk(
 
         if values.nbytes == 0:
             # No data to write from this chunk, so return as such.
-            durations["total_process"] = [time.perf_counter() - start_total]
+            durations["total_chunk_process"] = [time.perf_counter() - start_total]
             return durations, index, []
 
         # Save the data
@@ -336,7 +333,9 @@ def process_chunk(
             "target_rows": full_bounding_box.get_shape()[0],
             "offset_columns": offset[1],
             "offset_rows": offset[2],
+            "channel_count": np.uint32(1 if len(slices.shape) < 4 else slices.shape[-1]),
         }
+        type_ar = np.array([yx_vals.dtype.str, values.dtype.str, weights.dtype.str], dtype='S8')
         durations["split"] = [time.perf_counter() - start]
         start = time.perf_counter()
         # Gets slices off full array corresponding to each Z value.
@@ -347,27 +346,32 @@ def process_chunk(
         durations["stack"] = [time.perf_counter() - start]
         start = time.perf_counter()
 
-        file_path = Path(config.output_file_folder,
-                         f"{config.output_file_name}_t_{'_'.join(map(str, full_bounding_box.get_min(np.uint32)))}")
-        file_path.mkdir(exist_ok=True, parents=True)
+        i_path = Path(config.output_file_folder, f"{os.getppid()}_{config.output_file_name}")
+        i_path.mkdir(exist_ok=True, parents=True)
 
-        def write_z(i, z_slice):
+        def write_z(target, z_slice):
+            write_raw_intermediate(target,
+                                   np.fromiter(offset_dict.values(), dtype=np.uint32, count=5).tobytes(),
+                                   np.uint32(len(yx_vals[z_slice])).tobytes(),
+                                   type_ar.tobytes(),
+                                   yx_vals[z_slice].tobytes(), values[z_slice].tobytes(), weights[z_slice].tobytes())
+
+        def make_z(i, z_slice):
             offset_z = z_stack[i] + offset[0]
-            file_path.joinpath(f"i_{offset_z:05}").mkdir(exist_ok=True, parents=True)
-            write_small_intermediate(file_path.joinpath(f"i_{offset_z:05}", f"{index}.tif"),
-                                     np.fromiter(offset_dict.values(), dtype=np.uint32, count=4),
-                                     yx_vals[z_slice], np.atleast_2d(values)[:, z_slice], weights[z_slice])
+            z_path = i_path.joinpath(f"i_{offset_z:05}.dat")
+            with FileLock(z_path.with_suffix(".lock")):
+                write_z(open(z_path, "ab"), z_slice)
 
         with ThreadPool(12) as pool:
-            pool.starmap(write_z, enumerate(z_slices))
+            pool.starmap(make_z, enumerate(z_slices))
 
         durations["write_intermediate"] = [time.perf_counter() - start]
     except BaseException as be:
         print(f"Error on BP: {be}")
         traceback.print_tb(be.__traceback__, file=sys.stderr)
         raise be
 
-    durations["total_process"] = [time.perf_counter() - start_total]
+    durations["total_chunk_process"] = [time.perf_counter() - start_total]
 
     return durations, index, z_stack + offset[0]
 
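For reference, a sketch of how one record appended by write_z above might be read back. The layout is inferred from the tobytes() calls in this diff; the field names, the (count, 2) shape of the coordinate array, and the assumption of single-channel values and weights are guesses, not the documented contract of write_raw_intermediate.

import numpy as np

def read_record(f):
    # Five uint32 header fields from offset_dict (the first is outside the visible
    # hunk; the rest are target_rows, offset_columns, offset_rows, channel_count).
    header = np.frombuffer(f.read(5 * 4), dtype=np.uint32)
    # Number of coordinate rows in this record.
    count = int(np.frombuffer(f.read(4), dtype=np.uint32)[0])
    # Three 8-byte dtype strings describing yx_vals, values and weights.
    dtypes = [np.dtype(s.decode()) for s in np.frombuffer(f.read(3 * 8), dtype="S8")]
    # Assumed payload sizes: (count, 2) coordinates, then count values and count weights.
    yx = np.frombuffer(f.read(count * 2 * dtypes[0].itemsize), dtype=dtypes[0]).reshape(count, 2)
    values = np.frombuffer(f.read(count * dtypes[1].itemsize), dtype=dtypes[1])
    weights = np.frombuffer(f.read(count * dtypes[2].itemsize), dtype=dtypes[2])
    return header, yx, values, weights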