 and customizing data retrieval parameters.
 """
 
+from yaspin import yaspin
 import argparse
 import json
 import logging
@@ -211,10 +212,12 @@ def _filter_public_result(self, private_list, public_list):
             set([item["table_name"] for item in private_list])
             & set([item["table_name"] for item in public_list])
         )
-        logging.info(
-            f"Found {len(common)} table names existed in both private and public, use it to filter public tables:"
-        )
-        logging.info(json.dumps(common, indent=1))
+
+        if not self.disable_logging:
+            logging.info(
+                f"Found {len(common)} table names existed in both private and public, use it to filter public tables:"
+            )
+            logging.info(json.dumps(common, indent=1))
         filtered_public = [item for item in public_list if item["table_name"] in common]
         return filtered_public
 
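As a side note, the filter in this hunk is a plain set intersection on table names. A minimal standalone sketch of the same idea, with hypothetical sample data and a free function in place of the class method:

import json
import logging

def filter_public_by_private(private_list, public_list):
    # Keep only public entries whose table_name also appears in the private results.
    common = set(item["table_name"] for item in private_list) & set(
        item["table_name"] for item in public_list
    )
    logging.info("Found %d table names present in both private and public", len(common))
    logging.info(json.dumps(sorted(common), indent=1))
    return [item for item in public_list if item["table_name"] in common]

# Example: only "llama3" survives the filter.
private = [{"table_name": "llama3"}]
public = [{"table_name": "llama3"}, {"table_name": "vit"}]
print(filter_public_by_private(private, public))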
@@ -450,30 +453,21 @@ def print_all_groups_info(self) -> None:
         Separates results by category and displays counts.
         This is useful for debugging and understanding what data is available.
         """
-
         if not self.data or not self.matching_groups:
             logging.info("No data found, please call get_data() first")
             return
-
-        logging.info("peeking table result:")
-        logging.info(json.dumps(self.data[0], indent=2))
-
-        for item in self.matching_groups.values():
-            logging.info(f" all {item.category} benchmark results")
-            names = []
-            for row in item.data:
-                names.append(
-                    {
-                        "table_name": row["table_name"],
-                        "info": row["info"],
-                        "counts": len(row["rows"]),
-                    }
-                )
-            logging.info(
-                f"\n============ {item.category} benchmark results({len(names)})=================\n"
+        logging.info(f" all clean benchmark table info from HUD")
+        names = []
+        for item in self.data:
+            names.append(
+                {
+                    "table_name": item.get("table_name", ""),
+                    "groupInfo": item.get("groupInfo", ""),
+                    "counts": len(item.get("rows", [])),
+                }
             )
-            for name in names:
-                logging.info(json.dumps(name, indent=2))
+        for name in names:
+            logging.info(json.dumps(name, indent=2))
 
478472 def _generate_table_name (self , group_info : dict , fields : list [str ]) -> str :
479473 name = "_" .join (
@@ -568,13 +562,16 @@ def _fetch_execu_torch_data(self, start_time, end_time):
             group_row_by_fields=self.query_group_row_by_fields,
         )
         params = {k: v for k, v in params_object.__dict__.items() if v is not None}
-        response = requests.get(url, params=params)
-        if response.status_code == 200:
-            return response.json()
-        else:
-            logging.info(f"Failed to fetch benchmark data ({response.status_code})")
-            logging.info(response.text)
-            return None
+        with yaspin(text="Waiting for response", color="cyan") as spinner:
+            response = requests.get(url, params=params)
+            if response.status_code == 200:
+                spinner.ok("V")
+                return response.json()
+            else:
+                logging.info(f"Failed to fetch benchmark data ({response.status_code})")
+                logging.info(response.text)
+                spinner.fail("x")
+                return None
 
     def normalize_string(self, s: str) -> str:
         s = s.lower().strip()
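The change above just wraps the blocking HTTP call in yaspin's context manager so a terminal spinner runs while waiting, then stamps the spinner with the outcome. A minimal standalone sketch of the same pattern, with a placeholder URL rather than the HUD endpoint used by the script:

import logging
import requests
from yaspin import yaspin

def fetch_json(url, params=None):
    # Spin while the request is in flight; mark the spinner ok/fail when it returns.
    with yaspin(text="Waiting for response", color="cyan") as spinner:
        response = requests.get(url, params=params)
        if response.status_code == 200:
            spinner.ok("V")
            return response.json()
        spinner.fail("x")
        logging.info("Failed to fetch data (%s)", response.status_code)
        return None

# Hypothetical usage with a placeholder endpoint.
data = fetch_json("https://example.com/api/benchmark", params={"limit": 10})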
@@ -606,16 +603,26 @@ def argparsers():
     parser.add_argument(
         "--env", choices=["local", "prod"], default="prod", help="Environment"
     )
-    parser.add_argument("--silent", action="store_true", help="Disable logging")
 
+    parser.add_argument(
+        "--no-silent",
+        action="store_false",
+        dest="silent",
+        default=True,
+        help="Allow output (disable silent mode)",
+    )
     # Options for generate_data
     parser.add_argument(
         "--outputType",
         choices=["json", "df", "csv", "print", "excel"],
         default="print",
         help="Output format (only for generate_data)",
     )
-    parser.add_argument("--outputDir", default=".", help="Output directory")
+
+    parser.add_argument(
+        "--outputDir", default=".", help="Output directory, default is ."
+    )
+
     return parser.parse_args()
 
 
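For reference, the new --no-silent flag relies on argparse's store_false action: args.silent defaults to True and only flips to False when the flag is given on the command line. A minimal sketch of just that behavior, with the parser stripped down to the one option:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--no-silent",
    action="store_false",  # passing the flag stores False into args.silent
    dest="silent",
    default=True,
    help="Allow output (disable silent mode)",
)

print(parser.parse_args([]).silent)               # True  -> silent by default
print(parser.parse_args(["--no-silent"]).silent)  # False -> output enabled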
@@ -626,4 +633,6 @@ def argparsers():
         args.startTime,
         args.endTime,
     )
+    if not args.silent:
+        fetcher.print_all_groups_info()
     fetcher.output_data(args.outputType, args.outputDir)