88import boto3
99from botocore .exceptions import ClientError
1010
11- from sdgym .result_writer import S3ResultsWriter
11+ from sdgym .result_writer import LocalResultsWriter , S3ResultsWriter
1212from sdgym .run_benchmark .utils import OUTPUT_DESTINATION_AWS
1313from sdgym .s3 import S3_REGION , parse_s3_path
1414from sdgym .sdgym_result_explorer .result_explorer import SDGymResultsExplorer
@@ -62,7 +62,7 @@ def get_result_folder_name_and_s3_vars(aws_access_key_id, aws_secret_access_key)
6262
6363
# NOTE(review): this span is a unified-diff fragment from a scraped GitHub commit
# page. The fused leading digits are the old/new line numbers; '-' marks removed
# lines and '+' marks added lines. Spacing inside tokens (e.g. "result_explorer
# .summarize") and trailing spaces inside string literals are extraction
# artifacts of the scrape — confirm against the real source before reuse.
#
# Change under review: `upload_results` gains a `github_env` parameter (path to
# the GitHub Actions GITHUB_ENV file, or falsy outside a workflow) so the script
# can publish SKIP_UPLOAD / FOLDER_NAME to later workflow steps, and optionally
# mirrors the summary CSV to a local directory via LocalResultsWriter.
6464def upload_results (
65- aws_access_key_id , aws_secret_access_key , folder_name , s3_client , bucket , prefix
65+ aws_access_key_id , aws_secret_access_key , folder_name , s3_client , bucket , prefix , github_env
6666):
6767 """Upload benchmark results to S3."""
6868 result_explorer = SDGymResultsExplorer (
# NOTE(review): file lines 69-70 (the explorer's leading constructor arguments)
# are hidden between hunks here — presumably the output destination plus
# aws_access_key_id; verify against the full file.
@@ -71,16 +71,31 @@ def upload_results(
7171 aws_secret_access_key = aws_secret_access_key ,
7272 )
7373 result_writer = S3ResultsWriter (s3_client )
74-
# New: a local writer used only when GITHUB_LOCAL_RESULTS_DIR is set (below).
74+ local_results_writer = LocalResultsWriter ()
7575 if not result_explorer .all_runs_complete (folder_name ):
7676 LOGGER .warning (f'Run { folder_name } is not complete yet. Exiting.' )
# New: tell the workflow to skip the upload step before exiting early.
# NOTE(review): the literal reads 'SKIP_UPLOAD=true\n ' with a trailing space
# after the newline — presumably 'SKIP_UPLOAD=true\n' in the real source
# (scrape artifact); confirm.
77+ if github_env :
78+ with open (github_env , 'a' ) as env_file :
79+ env_file .write ('SKIP_UPLOAD=true\n ' )
80+
7781 sys .exit (0 )
# New: on the complete path, publish SKIP_UPLOAD=false and FOLDER_NAME so the
# downstream workflow steps know which run folder was summarized. The
# pre-existing LOGGER.info moves inside this else-branch (removed at old
# line 79 below).
82+ else :
83+ LOGGER .info (f'Run { folder_name } is complete! Proceeding with summarization...' )
84+ if github_env :
85+ with open (github_env , 'a' ) as env_file :
86+ env_file .write ('SKIP_UPLOAD=false\n ' )
87+ env_file .write (f'FOLDER_NAME={ folder_name } \n ' )
7888
79- LOGGER .info (f'Run { folder_name } is complete! Proceeding with summarization...' )
8089 summary , _ = result_explorer .summarize (folder_name )
8190 result_writer .write_dataframe (
8291 summary , f'{ OUTPUT_DESTINATION_AWS } { folder_name } /{ folder_name } _summary.csv' , index = True
8392 )
# New: best-effort local mirror of the summary CSV, gated on an env var so CI
# can collect it as an artifact; no-op when GITHUB_LOCAL_RESULTS_DIR is unset.
93+ local_export_dir = os .environ .get ('GITHUB_LOCAL_RESULTS_DIR' )
94+ if local_export_dir :
95+ local_results_writer .write_dataframe (
96+ summary , f'{ local_export_dir } /{ folder_name } _summary.csv' , index = True
97+ )
98+
# Unchanged: marker object prevents duplicate uploads (checked by
# upload_already_done in main).
8499 write_uploaded_marker (s3_client , bucket , prefix , folder_name )
85100
86101
# NOTE(review): fragment of `main()` — the `def` line and the credential setup
# above it are outside this hunk (only echoed in the @@ header). Diff-view
# lines; '+' marks additions, '-' removals. Do not treat as runnable Python.
@@ -91,11 +106,18 @@ def main():
91106 folder_name , s3_client , bucket , prefix = get_result_folder_name_and_s3_vars (
92107 aws_access_key_id , aws_secret_access_key
93108 )
# New: resolve the GitHub Actions env-file path once; falsy outside a workflow.
109+ github_env = os .environ .get ('GITHUB_ENV' )
94110 if upload_already_done (s3_client , bucket , prefix , folder_name ):
95111 LOGGER .warning ('Benchmark results have already been uploaded. Exiting.' )
# New: also signal SKIP_UPLOAD=true on the already-uploaded early exit, matching
# the not-complete branch in upload_results. Same duplicated write-to-GITHUB_ENV
# pattern appears three times across this diff — a shared helper would be worth
# suggesting. The '\n ' trailing space inside the literal is presumably a scrape
# artifact ('\n' in the real source); confirm.
112+ if github_env :
113+ with open (github_env , 'a' ) as env_file :
114+ env_file .write ('SKIP_UPLOAD=true\n ' )
115+
96116 sys .exit (0 )
97117
# Changed call site: passes the new github_env argument through to
# upload_results (old single-line call removed at old line 98).
98- upload_results (aws_access_key_id , aws_secret_access_key , folder_name , s3_client , bucket , prefix )
118+ upload_results (
119+ aws_access_key_id , aws_secret_access_key , folder_name , s3_client , bucket , prefix , github_env
120+ )
99121
100122
101123if __name__ == '__main__' :
0 commit comments