@@ -174,6 +174,7 @@ def load_groundtruth(self, swc_pointer):
         print("\n(1) Load Ground Truth")
         graph_builder = gutil.GraphBuilder(
             anisotropy=self.anisotropy,
+            is_groundtruth=True,
             label_mask=self.label_mask,
             use_anisotropy=False,
         )
@@ -203,6 +204,7 @@ def load_fragments(self, swc_pointer):
         if swc_pointer:
             graph_builder = gutil.GraphBuilder(
                 anisotropy=self.anisotropy,
+                is_groundtruth=False,
                 selected_ids=self.get_all_node_labels(),
                 use_anisotropy=self.use_anisotropy,
             )
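Both loaders now pass an explicit is_groundtruth flag to the same builder. Below is a hypothetical stub, not the library's actual implementation; the signature is inferred only from the keyword arguments visible in the two hunks above. It illustrates how such a flag lets downstream code branch on graph provenance instead of guessing it:

import networkx  # placeholder import; the real builder's dependencies are unknown

# Hypothetical stub, inferred from the call sites above; not the actual
# gutil.GraphBuilder implementation.
class GraphBuilder:
    def __init__(self, anisotropy, is_groundtruth=False, label_mask=None,
                 selected_ids=None, use_anisotropy=True):
        self.anisotropy = anisotropy
        self.is_groundtruth = is_groundtruth  # graphs can report provenance
        self.label_mask = label_mask
        self.selected_ids = selected_ids
        self.use_anisotropy = use_anisotropy

    def run(self, swc_pointer):
        # Downstream code can branch on provenance, e.g. skipping
        # anisotropic scaling for ground-truth skeletons.
        scale = self.anisotropy if self.use_anisotropy else (1.0, 1.0, 1.0)
        ...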
@@ -464,12 +466,13 @@ def detect_splits(self):
             n_missing = n_before - n_after
             p_omit = 100 * (n_missing + n_split_edges) / n_before
             p_split = 100 * n_split_edges / n_before
+            gt_rl = graph.run_length

             self.graphs[key] = graph
-            self.metrics.at[key, "% Omit"] = p_omit
+            self.metrics.at[key, "% Omit"] = round(p_omit, 2)
             self.metrics.at[key, "# Splits"] = gutil.count_splits(graph)
-            self.metrics.loc[key, "% Split"] = p_split
-            self.metrics.loc[key, "GT Run Length"] = graph.run_length
+            self.metrics.loc[key, "% Split"] = round(p_split, 2)
+            self.metrics.loc[key, "GT Run Length"] = round(gt_rl, 2)
             pbar.update(1)

     # -- Merge Detection --
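An aside on the mixed accessors in this hunk: pandas .at and .loc both set a single cell here; .at is the optimized scalar-only accessor, while .loc is the general label-based one that also handles slices and arrays. A minimal illustration with the column names from the hunk (the "test-1" row label is invented):

import pandas as pd

# .at and .loc are interchangeable when writing one scalar cell.
metrics = pd.DataFrame(
    {"% Omit": [0.0], "% Split": [0.0], "GT Run Length": [0.0]},
    index=["test-1"],
)
metrics.at["test-1", "% Omit"] = round(12.3456, 2)        # -> 12.35
metrics.loc["test-1", "% Split"] = round(3.14159, 2)      # -> 3.14
metrics.loc["test-1", "GT Run Length"] = round(1234.567, 2)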
@@ -571,7 +574,7 @@ def is_fragment_merge(self, key, label, kdtree):
         for leaf in gutil.get_leafs(fragment_graph):
             voxel = fragment_graph.voxels[leaf]
             gt_voxel = util.kdtree_query(kdtree, voxel)
-            if self.physical_dist(gt_voxel, voxel) > 50:
+            if self.physical_dist(gt_voxel, voxel) > 60:
                 visited = self.find_merge_site(
                     key, kdtree, fragment_graph, leaf, visited
                 )
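For intuition, here is a self-contained sketch of the leaf test this hunk tunes, written against scipy's cKDTree directly; the repo's util.kdtree_query and physical_dist wrappers are approximated rather than reproduced, and the coordinates are made up. A fragment leaf whose nearest ground-truth point lies beyond the threshold (now 60, in whatever physical units physical_dist uses) is treated as a candidate merge site:

import numpy as np
from scipy.spatial import cKDTree

# Build a KD-tree over stand-in ground-truth coordinates.
rng = np.random.default_rng(0)
gt_points = rng.random((1000, 3)) * 500.0
kdtree = cKDTree(gt_points)

# Nearest-neighbor distance from a fragment leaf to the ground truth;
# beyond the threshold, the leaf is flagged as a candidate merge site.
leaf_xyz = np.array([250.0, 250.0, 250.0])
dist, _ = kdtree.query(leaf_xyz)
if dist > 60:
    print("candidate merge site")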
@@ -632,10 +635,13 @@ def process_merge_sites(self):

         # Save merge sites
         if self.save_merges:
+            row_names = list()
             for i in range(len(self.merge_sites)):
                 filename = f"merge-{i + 1}.swc"
                 xyz = self.merge_sites.iloc[i]["World"]
                 swc_util.to_zipped_point(self.merge_writer, filename, xyz)
+                row_names.append(filename)
+            self.merge_sites.index = row_names
             self.merge_writer.close()

         # Update counter
@@ -645,7 +651,7 @@ def process_merge_sites(self):

         # Save results
         path = os.path.join(self.output_dir, "merge_sites.csv")
-        self.merge_sites.to_csv(path, index=False)
+        self.merge_sites.to_csv(path, index=True)


     def adjust_metrics(self, key):
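Taken together, these two hunks make the saved CSV self-describing: each row of merge_sites is re-labeled with the SWC filename written for it, and index=True then writes those labels out. A minimal pandas illustration (the "World" column name comes from the hunk; the coordinates are invented):

import pandas as pd

# Label each row with the filename of the SWC it was exported to, so a CSV
# row can be traced back to its merge-site file.
merge_sites = pd.DataFrame({"World": [(10.0, 20.0, 30.0), (40.0, 50.0, 60.0)]})
merge_sites.index = [f"merge-{i + 1}.swc" for i in range(len(merge_sites))]
merge_sites.to_csv("merge_sites.csv", index=True)
# merge_sites.csv rows now start with merge-1.swc, merge-2.swc, ...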
@@ -757,7 +763,7 @@ def quantify_merges(self):
         """
         for key in self.graphs:
             p = self.n_merged_edges[key] / self.graphs[key].graph["n_edges"]
-            self.metrics.loc[key, "% Merged"] = 100 * p
+            self.metrics.loc[key, "% Merged"] = round(100 * p, 2)

     # -- Compute Metrics --
     def compute_edge_accuracy(self):
@@ -776,7 +782,8 @@ def compute_edge_accuracy(self):
         for key in self.graphs:
             p_omit = self.metrics.loc[key, "% Omit"]
             p_merged = self.metrics.loc[key, "% Merged"]
-            self.metrics.loc[key, "Edge Accuracy"] = 100 - p_omit - p_merged
+            edge_accuracy = round(100 - p_omit - p_merged, 2)
+            self.metrics.loc[key, "Edge Accuracy"] = edge_accuracy

     def compute_erl(self):
         """
@@ -799,8 +806,9 @@ def compute_erl(self):
             wgt = run_lengths / max(np.sum(run_lengths), 1)

             erl = np.sum(wgt * run_lengths)
-            self.metrics.loc[key, "ERL"] = erl
-            self.metrics.loc[key, "Normalized ERL"] = erl / max(run_length, 1)
+            n_erl = round(erl / max(run_length, 1), 4)
+            self.metrics.loc[key, "ERL"] = round(erl, 2)
+            self.metrics.loc[key, "Normalized ERL"] = n_erl

     def compute_weighted_avg(self, column_name):
         wgt = self.metrics["GT Run Length"]
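The ERL computed above is a length-weighted mean of run lengths: each run is weighted by its share of the total, so ERL = sum(l_i**2) / sum(l_i). A worked numpy example, mirroring the hunk (run_length, the ground-truth total used for normalization, is a made-up value here):

import numpy as np

# Length-weighted expected run length:
# wgt_i = l_i / sum(l), so ERL = sum(wgt_i * l_i) = sum(l_i**2) / sum(l_i).
run_lengths = np.array([100.0, 50.0, 50.0])
wgt = run_lengths / max(np.sum(run_lengths), 1)   # [0.5, 0.25, 0.25]
erl = np.sum(wgt * run_lengths)                   # 50 + 12.5 + 12.5 = 75.0
run_length = 200.0                                # hypothetical GT total
n_erl = round(erl / max(run_length, 1), 4)        # 0.375
print(round(erl, 2), n_erl)

Because each run is weighted by its own length, one long unbroken run raises ERL far more than many short ones of the same total length, which is why splits depress the score.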