@@ -1,13 +1,11 @@
#!/usr/bin/env python
import argparse
import glob
- import json
import os
import re
- import tempfile
from datetime import date, datetime

- from huggingface_hub import login, snapshot_download, upload_file
+ # Removed HF Hub imports
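+ # (json and tempfile are dropped above because they were only used by the
+ # Hub download/compare/upload helpers removed further down in this diff.)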
from slack_sdk import WebClient
from tabulate import tabulate

@@ -22,9 +20,7 @@
    help="Directory containing test reports (will search recursively in all subdirectories)",
)
parser.add_argument("--output_file", default=None, help="Path to save the consolidated report (markdown format)")
- parser.add_argument("--hf_dataset_repo", default=None, help="Hugging Face dataset repository to store reports")
- parser.add_argument("--upload_to_hub", action="store_true", help="Whether to upload the report to Hugging Face Hub")
- parser.add_argument("--compare_with_previous", action="store_true", help="Compare with the previous report from Hub")
+ # Removed HF dataset related arguments


def parse_stats_file(file_path):
@@ -287,26 +283,7 @@ def generate_report(consolidated_data):
    report.append("# Diffusers Nightly Test Report")
    report.append(f"Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")

-     # Add comparison section if available
-     comparison = consolidated_data.get("comparison")
-     previous_date = consolidated_data.get("previous_date")
-
-     if comparison:
-         # Determine comparison header based on previous date
-         if previous_date:
-             report.append(f"## New Failures Since {previous_date}")
-         else:
-             report.append("## New Failures")
-
-         # New failures
-         new_failures = comparison.get("new_failures", [])
-         if new_failures:
-             report.append(f"🔴 {len(new_failures)} new failing tests compared to previous report:\n")
-             for i, test in enumerate(new_failures, 1):
-                 report.append(f"{i}. `{test}`")
-             report.append("")
-         else:
-             report.append("No new test failures detected! 🎉\n")
+     # Removed comparison section

    # Add summary section
    total = consolidated_data["total_stats"]
@@ -469,27 +446,7 @@ def create_slack_payload(consolidated_data):
        },
    ]

-     # Add new failures section if available
-     comparison = consolidated_data.get("comparison")
-     previous_date = consolidated_data.get("previous_date")
-
-     if comparison and "new_failures" in comparison:
-         new_failures = comparison["new_failures"]
-
-         if previous_date:
-             title = f"*New Failures Since {previous_date}:*"
-         else:
-             title = "*New Failures:*"
-
-         if new_failures:
-             message = f"{title}\n"
-             for i, test in enumerate(new_failures[:10], 1):  # Limit to first 10
-                 message += f"{i}. `{test}`\n"
-
-             if len(new_failures) > 10:
-                 message += f"_...and {len(new_failures) - 10} more_\n"
-
-             payload.append({"type": "section", "text": {"type": "mrkdwn", "text": message}})
+     # Removed comparison section

    # Add failed test suites summary
    failed_suites = [
@@ -549,96 +506,7 @@ def create_slack_payload(consolidated_data):
    return payload


- def download_previous_report(repo_id):
-     """Download the most recent report from the HF dataset repository."""
-     try:
-         # Create a temporary directory
-         with tempfile.TemporaryDirectory() as tmp_dir:
-             # Download the repository content
-             snapshot_download(repo_id=repo_id, local_dir=tmp_dir, repo_type="dataset")
-
-             # Find the most recent report file
-             report_files = glob.glob(os.path.join(tmp_dir, "report_*.json"))
-             if not report_files:
-                 print("No previous reports found in the repository.")
-                 return None, None
-
-             # Sort by date (assuming report_YYYY-MM-DD.json format)
-             report_files.sort(reverse=True)
-             latest_file = report_files[0]
-
-             # Extract date from filename (report_YYYY-MM-DD.json)
-             report_date = os.path.basename(latest_file).split(".")[0].split("_")[1]
-
-             # Read the most recent report
-             with open(latest_file, "r") as f:
-                 return json.load(f), report_date
-     except Exception as e:
-         print(f"Error downloading previous report: {e}")
-         return None, None
-
-
- def compare_reports(current_data, previous_data):
-     """Compare current test results with previous ones to identify new failures."""
-     if not previous_data:
-         return {"new_failures": []}
-
-     # Get current and previous failed tests
-     current_failures = set()
-     for suite_name, suite_data in current_data["test_suites"].items():
-         for failure in suite_data["failures"]:
-             current_failures.add(failure["test"])
-
-     previous_failures = set()
-     for suite_name, suite_data in previous_data["test_suites"].items():
-         for failure in suite_data["failures"]:
-             previous_failures.add(failure["test"])
-
-     # Find new failures
-     new_failures = current_failures - previous_failures
-
-     return {"new_failures": list(new_failures)}
-
-
- def upload_report_to_hub(data, report_text, repo_id):
-     """Upload the report to the Hugging Face Hub dataset repository."""
-     try:
-         # Check if HF_TOKEN is available
-         hf_token = os.environ.get("HF_TOKEN")
-         if not hf_token:
-             print("HF_TOKEN environment variable not set. Cannot upload to Hub.")
-             return False
-
-         # Login to Hugging Face
-         login(token=hf_token)
-
-         # Create a temporary directory
-         with tempfile.TemporaryDirectory() as tmp_dir:
-             # Generate filename based on current date
-             today = date.today().strftime("%Y-%m-%d")
-             json_filename = f"report_{today}.json"
-             md_filename = f"report_{today}.md"
-
-             # Save report as JSON and Markdown
-             json_path = os.path.join(tmp_dir, json_filename)
-             md_path = os.path.join(tmp_dir, md_filename)
-
-             with open(json_path, "w") as f:
-                 json.dump(data, f, indent=2)
-
-             with open(md_path, "w") as f:
-                 f.write(report_text)
-
-             # Upload files to Hub
-             upload_file(path_or_fileobj=json_path, path_in_repo=json_filename, repo_id=repo_id, repo_type="dataset")
-
-             upload_file(path_or_fileobj=md_path, path_in_repo=md_filename, repo_id=repo_id, repo_type="dataset")
-
-             print(f"Report successfully uploaded to {repo_id}")
-             return True
-     except Exception as e:
-         print(f"Error uploading report to Hub: {e}")
-         return False
+ # Removed HF dataset related functions


def main(args):
@@ -654,22 +522,11 @@ def main(args):
    if consolidated_data["total_stats"]["tests"] == 0:
        print(f"Warning: No test results found in '{args.reports_dir}' or its subdirectories.")

-     # Compare with previous report if requested
-     comparison_data = None
-     if args.compare_with_previous and args.hf_dataset_repo:
-         previous_data, previous_date = download_previous_report(args.hf_dataset_repo)
-         if previous_data:
-             comparison_data = compare_reports(consolidated_data, previous_data)
-             # Add comparison data and previous report date to consolidated data
-             consolidated_data["comparison"] = comparison_data
-             consolidated_data["previous_date"] = previous_date
+     # Removed comparison section

    # Generate markdown report
    report = generate_report(consolidated_data)

-     # Print report to stdout
-     print(report)
-
    # Save report to file if specified
    if args.output_file:
        # Create parent directories if they don't exist
@@ -679,13 +536,16 @@ def main(args):

        with open(args.output_file, "w") as f:
            f.write(report)
-         print(f"Report saved to {args.output_file}")

-     # Upload to Hugging Face Hub if requested
-     if args.upload_to_hub and args.hf_dataset_repo:
-         upload_report_to_hub(consolidated_data, report, args.hf_dataset_repo)
+         # Only print the report when saving to file
+         print(report)
+
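+     # Note: GITHUB_STEP_SUMMARY is set by GitHub Actions to a per-step file;
+     # markdown appended to it is rendered on the workflow run's summary page,
+     # effectively replacing the Hub upload as the place to browse the report.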
+     # Send GitHub workflow summary if running in GitHub Actions
+     if os.environ.get("GITHUB_STEP_SUMMARY"):
+         with open(os.environ["GITHUB_STEP_SUMMARY"], "a") as f:
+             f.write(report)

-     # Send to Slack if token is available
+     # Send to Slack if token is available (optional, can be disabled)
    slack_token = os.environ.get("SLACK_API_TOKEN")
    if slack_token and args.slack_channel_name:
        payload = create_slack_payload(consolidated_data)
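
For reference, a local run after this change might look like the following; the script filename here is illustrative, while --reports_dir, --output_file, and --slack_channel_name are the flags the script actually defines:

    python consolidate_reports.py --reports_dir reports/ --output_file consolidated_report.md

Slack delivery still happens automatically when SLACK_API_TOKEN is set and --slack_channel_name is given; under GitHub Actions the report is additionally appended to the step summary.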