@@ -1511,8 +1511,6 @@ def estimate_size(self):
         footprint, as the estimates provided by the catalog's parquet metadata
         only account for the size of parquet data pages.
         """
-        from human_readable import file_size, int_comma
-
         warnings.warn("The estimates provided are approximations.")
 
         def get_row_count(stats):
@@ -1538,11 +1536,11 @@ def get_disk_size(stats):
         orig_cat = hc.read_hats(self.hc_structure.catalog_path)
         expected_cat_rows = int(get_row_count(pixel_stats))
         row_pct = expected_cat_rows / int(orig_cat.catalog_info.total_rows) * 100
-        expected_cat_rows = int_comma(expected_cat_rows)
+        expected_cat_rows = f"{expected_cat_rows:,}"
 
         # In-memory and on disk estimates
-        mem_size = file_size(get_mem_size(pixel_stats), binary=True)
-        disk_size = file_size(get_disk_size(pixel_stats), binary=True)
+        mem_size = _human_file_size(get_mem_size(pixel_stats))
+        disk_size = _human_file_size(get_disk_size(pixel_stats))
 
         print(
             f"You selected {len(self.columns)}/{len(self.all_columns)} columns.\n"
@@ -1551,3 +1549,12 @@ def get_disk_size(stats):
             f"Expect up to {mem_size} in MEMORY.\n"
             f"Expect up to {disk_size} on DISK."
         )
+
+
+def _human_file_size(size_bytes):
+    """Convert bytes to human readable format (binary units only)."""
+    for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
+        if size_bytes < 1024:
+            return f"{size_bytes:.1f} {unit}"
+        size_bytes /= 1024
+    return f"{size_bytes:.2f} PiB"
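
For reference, the patch drops the external human_readable dependency in favor of the small inline helper added above. The sketch below is illustrative only: the helper body is copied from the patch so the snippet runs standalone, and the sample byte counts and expected strings are assumptions chosen to show the formatting at a few scales, not project tests.

# Copied from the patch above so this sketch runs on its own.
def _human_file_size(size_bytes):
    """Convert bytes to human readable format (binary units only)."""
    for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
        if size_bytes < 1024:
            return f"{size_bytes:.1f} {unit}"
        size_bytes /= 1024
    return f"{size_bytes:.2f} PiB"

# Spot checks of the formatting behavior (illustrative values).
assert _human_file_size(512) == "512.0 B"
assert _human_file_size(1536) == "1.5 KiB"            # 1536 / 1024
assert _human_file_size(3 * 1024**3) == "3.0 GiB"
assert _human_file_size(2 * 1024**5) == "2.00 PiB"    # anything past TiB hits the final return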