@@ -369,7 +369,6 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
     storage_options_input = storage_options_input or {}
     storage_options_output = storage_options_output or {}
 
-    print("pre-setup")
     # Open SEG-Y with MDIO's SegySpec. Endianness will be inferred.
     mdio_spec = mdio_segy_spec()
     segy_settings = SegySettings(storage_options=storage_options_input)
@@ -379,28 +378,23 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
     binary_header = segy.binary_header
     num_traces = segy.num_traces
 
-    print("pre-index")
     # Index the dataset using a spec that interprets the user provided index headers.
     index_fields: list[HeaderField] = []
     for name, byte, format_ in zip(index_names, index_bytes, index_types, strict=True):
         index_fields.append(HeaderField(name=name, byte=byte, format=format_))
     mdio_spec_grid = mdio_spec.customize(trace_header_fields=index_fields)
     segy_grid = SegyFile(url=segy_path, spec=mdio_spec_grid, settings=segy_settings)
 
-    print("pre-get_grid_plan")
     dimensions, chunksize, index_headers = get_grid_plan(
         segy_file=segy_grid,
         return_headers=True,
         chunksize=chunksize,
         grid_overrides=grid_overrides,
     )
     grid = Grid(dims=dimensions)
-    print("pre-grid_density_qc")
     grid_density_qc(grid, num_traces)
-    print("pre-build_map")
     grid.build_map(index_headers)
 
-    print("pre-valid_mask")
     # Check grid validity by ensuring every trace's header-index is within dimension bounds
     valid_mask = np.ones(grid.num_traces, dtype=bool)
     for d_idx in range(len(grid.header_index_arrays)):
@@ -415,7 +409,6 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
         logger.warning("Ingestion grid shape: %s.", grid.shape)
         raise GridTraceCountError(valid_count, num_traces)
 
-    print("pre-chunksize")
     if chunksize is None:
         dim_count = len(index_names) + 1
         if dim_count == 2:  # noqa: PLR2004
@@ -438,7 +431,6 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
     suffix = [str(idx) for idx, value in enumerate(suffix) if value is not None]
     suffix = "".join(suffix)
 
-    print("pre-compressors")
     compressors = get_compressor(lossless, compression_tolerance)
     header_dtype = segy.spec.trace.header.dtype.newbyteorder("=")
     var_conf = MDIOVariableConfig(
@@ -450,7 +442,6 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
     )
    config = MDIOCreateConfig(path=mdio_path_or_buffer, grid=grid, variables=[var_conf])
 
-    print("pre-create_empty")
     root_group = create_empty(
         config,
         overwrite=overwrite,
@@ -462,7 +453,6 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
     data_array = data_group[f"chunked_{suffix}"]
     header_array = meta_group[f"chunked_{suffix}_trace_headers"]
 
-    print("pre-live_mask")
     live_mask_array = meta_group["live_mask"]
     # 'live_mask_array' has the same first N-1 dims as 'grid.shape[:-1]'
     # Build a ChunkIterator over the live_mask (no sample axis)
@@ -494,12 +484,10 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
 
     nonzero_count = grid.num_traces
 
-    print("pre-write_attribute")
     write_attribute(name="trace_count", zarr_group=root_group, attribute=nonzero_count)
     write_attribute(name="text_header", zarr_group=meta_group, attribute=text_header.split("\n"))
     write_attribute(name="binary_header", zarr_group=meta_group, attribute=binary_header.to_dict())
 
-    print("pre-to_zarr")
     # Write traces
     zarr_root = mdio_path_or_buffer  # the same path you passed earlier to create_empty
     data_var = f"data/chunked_{suffix}"
@@ -513,10 +501,8 @@ def segy_to_mdio(  # noqa: PLR0913, PLR0915
         header_var_path=header_var,
     )
 
-    print("pre-write_attribute")
     # Write actual stats
     for key, value in stats.items():
         write_attribute(name=key, zarr_group=root_group, attribute=value)
 
-    print("pre-consolidate_metadata")
     zarr.consolidate_metadata(root_group.store)
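
The deleted `print()` calls were temporary step-by-step breadcrumbs written to stdout during ingestion. If that kind of progress visibility is still wanted, the conventional substitute is a debug-level log record, since this function already routes diagnostics through a module `logger` (see the `logger.warning` call kept above). Below is a minimal sketch of the pattern, using an illustrative logger name and a hypothetical `ingest_step` helper rather than the repository's actual code:

```python
import logging

# Illustrative logger name; the real module would typically use logging.getLogger(__name__).
logger = logging.getLogger("mdio.converters.segy")


def ingest_step(num_traces: int) -> None:
    # Debug-level breadcrumb in place of print("pre-grid_density_qc"):
    # silent by default, visible only when DEBUG logging is enabled.
    logger.debug("pre-grid_density_qc: %d traces", num_traces)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    ingest_step(1_000)
```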