11
11
This script compares the generated CSV coverage files with the ones in the codebase.
12
12
"""
13
13
14
# Name of the GitHub Actions workflow whose artifact runs are queried and
# downloaded by this script (must match the workflow's `name:` field exactly,
# since it is used to filter runs via the `gh api` call below).
artifacts_workflow_name = "Check framework coverage changes"
15
15
16
16
17
17
def check_file_exists (file ):
@@ -46,24 +46,24 @@ def compare_files_str(file1, file2):
46
46
return ret
47
47
48
48
49
def download_artifact(repo, name, dir, run_id):
    """Download the named artifact of a workflow run using the GitHub CLI.

    Fetches artifact `name` from run `run_id` in `repo` into directory `dir`
    via `gh run download`. Raises if the underlying command fails (behavior
    delegated to utils.subprocess_run).
    """
    # NOTE(review): parameter name `dir` shadows the builtin; kept as-is to
    # preserve the public interface for keyword callers.
    command = ["gh", "run", "download",
               "--repo", repo,
               "--name", name,
               "--dir", dir,
               str(run_id)]
    utils.subprocess_run(command)
49
54
def write_diff_for_run(output_file, repo, run_id):
    """Download the base and merge coverage artifacts for `run_id` and write
    their comparison to `output_file`.

    The two artifacts are extracted into temporary folders which are always
    removed afterwards, even when the download or comparison fails.
    """
    # Maps artifact name -> local extraction folder; insertion order matters
    # (base is downloaded first, then merge), which dicts preserve.
    artifact_folders = {
        "csv-framework-coverage-base": "out_base",
        "csv-framework-coverage-merge": "out_merge",
    }
    try:
        for artifact_name, folder in artifact_folders.items():
            download_artifact(repo, artifact_name, folder, run_id)

        compare_folders("out_base", "out_merge", output_file)
    finally:
        # Best-effort cleanup: only remove folders that actually exist
        # (a failed download may have created neither or just one).
        for folder in artifact_folders.values():
            if os.path.isdir(folder):
                shutil.rmtree(folder)
67
67
68
68
69
69
def get_comment_text (output_file , repo , run_id ):
@@ -98,6 +98,8 @@ def comment_pr(output_file, repo, run_id):
98
98
# Store diff for current run
99
99
write_diff_for_run (output_file , repo , run_id )
100
100
101
+ download_artifact (repo , "pr" , "pr" , run_id )
102
+
101
103
try :
102
104
with open ("pr/NR" ) as file :
103
105
pr_number = int (file .read ())
@@ -207,7 +209,7 @@ def get_previous_run_id(repo, run_id, pr_number):
207
209
pr_repo = this_run ["head_repository" ]
208
210
209
211
# Get all previous runs that match branch, repo and workflow name:
210
- ids = utils .subprocess_check_output (["gh" , "api" , "-X" , "GET" , "repos/" + repo + "/actions/runs" , "-f" , "event=pull_request" , "-f" , "status=success" , "-f" , "name=\" " + artifacts_worflow_name + "\" " , "--jq" ,
212
+ ids = utils .subprocess_check_output (["gh" , "api" , "-X" , "GET" , "repos/" + repo + "/actions/runs" , "-f" , "event=pull_request" , "-f" , "status=success" , "-f" , "name=\" " + artifacts_workflow_name + "\" " , "--jq" ,
211
213
"[.workflow_runs.[] | select(.head_branch==\" " + pr_branch + "\" and .head_repository.full_name==\" " + pr_repo + "\" ) | { created_at: .created_at, run_id: .id}] | sort_by(.created_at) | reverse | [.[].run_id]" ])
212
214
213
215
ids = json .loads (ids )
@@ -216,8 +218,7 @@ def get_previous_run_id(repo, run_id, pr_number):
216
218
" in the list of matching runs." )
217
219
218
220
for previous_run_id in ids [1 :]:
219
- utils .subprocess_run (["gh" , "run" , "download" , "--repo" , repo ,
220
- "--name" , "pr" , "--dir" , "prev_run_pr" , str (previous_run_id )])
221
+ download_artifact (repo , "pr" , "prev_run_pr" , previous_run_id )
221
222
222
223
try :
223
224
with open ("prev_run_pr/NR" ) as file :
0 commit comments