
Commit 88b6542

initial support for skipped job status
1 parent 6e9cbdc commit 88b6542

1 file changed: +83 -31 lines changed


.github/actions/create_workflow_report/create_workflow_report.py

Lines changed: 83 additions & 31 deletions
@@ -479,11 +479,15 @@ def format_test_name_for_linewrap(text: str) -> str:
 
 def format_test_status(text: str) -> str:
     """Format the test status for better readability."""
-    color = (
-        "red"
-        if text.lower().startswith("fail")
-        else "orange" if text.lower() in ("error", "broken", "pending") else "green"
-    )
+    if text.lower().startswith("fail"):
+        color = "red"
+    elif text.lower() == "skipped":
+        color = "grey"
+    elif text.lower() in ("success", "ok", "passed", "pass"):
+        color = "green"
+    else:
+        color = "orange"
+
     return f'<span style="font-weight: bold; color: {color}">{text}</span>'
 
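As a quick standalone check (not part of the commit), the new mapping can be exercised by copying just this function; the sample statuses below are illustrative:

def format_test_status(text: str) -> str:
    """Format the test status for better readability."""
    if text.lower().startswith("fail"):
        color = "red"
    elif text.lower() == "skipped":
        color = "grey"
    elif text.lower() in ("success", "ok", "passed", "pass"):
        color = "green"
    else:
        color = "orange"

    return f'<span style="font-weight: bold; color: {color}">{text}</span>'

# "Skipped" now gets its own grey styling instead of falling into the orange catch-all.
print(format_test_status("Skipped"))  # <span ... color: grey">Skipped</span>
print(format_test_status("error"))    # <span ... color: orange">error</span>
print(format_test_status("OK"))       # <span ... color: green">OK</span>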

@@ -511,28 +515,42 @@ def format_results_as_html_table(results) -> str:
     return html
 
 
-def parse_args() -> argparse.Namespace:
-    parser = argparse.ArgumentParser(description="Create a combined CI report.")
-    parser.add_argument(  # Need the full URL rather than just the ID to query the databases
-        "--actions-run-url", required=True, help="URL of the actions run"
-    )
-    parser.add_argument(
-        "--pr-number", help="Pull request number for the S3 path", type=int
-    )
-    parser.add_argument("--commit-sha", help="Commit SHA for the S3 path")
-    parser.add_argument(
-        "--no-upload", action="store_true", help="Do not upload the report"
-    )
-    parser.add_argument(
-        "--known-fails", type=str, help="Path to the file with known fails"
-    )
-    parser.add_argument(
-        "--cves", action="store_true", help="Get CVEs from Grype results"
-    )
-    parser.add_argument(
-        "--mark-preview", action="store_true", help="Mark the report as a preview"
-    )
-    return parser.parse_args()
+def backfill_skipped_statuses(
+    job_statuses: pd.DataFrame, pr_number: int, branch: str, commit_sha: str
+):
+    """
+    Fill in the job statuses for skipped jobs.
+    """
+
+    if pr_number == 0:
+        ref_param = f"REF={branch}"
+        workflow_name = "MasterCI"
+    else:
+        ref_param = f"PR={pr_number}"
+        workflow_name = "PR"
+
+    status_file = f"result_{workflow_name.lower()}.json"
+    s3_path = f"https://{S3_BUCKET}.s3.amazonaws.com/{ref_param.replace('=', 's/')}/{commit_sha}/{status_file}"
+    print(s3_path)
+    response = requests.get(s3_path)
+
+    if response.status_code != 200:
+        return job_statuses
+
+    status_data = response.json()
+    skipped_jobs = []
+    for job in status_data["results"]:
+        if job["status"] == "skipped" and len(job["links"]) > 0:
+            skipped_jobs.append(
+                {
+                    "job_name": job["name"],
+                    "job_status": job["status"],
+                    "message": job["info"],
+                    "results_link": job["links"][0],
+                }
+            )
+
+    return pd.concat([job_statuses, pd.DataFrame(skipped_jobs)], ignore_index=True)
 
 
 def get_build_report_links(
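The commit does not show the schema of the result_pr.json / result_masterci.json status files; only the fields the new function reads are visible ("results", and per-job "name", "status", "info", "links"). Below is a minimal sketch of the parsing loop against an in-memory payload, with made-up job names, info text, and links, assuming that shape:

import pandas as pd

# Hypothetical payload; only the fields read by backfill_skipped_statuses are modeled.
status_data = {
    "results": [
        {
            "name": "Build amd64",
            "status": "skipped",
            "info": "no relevant changes",  # made-up info text
            "links": ["https://example.com/build_amd64/report.html"],
        },
        {"name": "Stateless tests", "status": "success", "info": "", "links": []},
    ]
}

skipped_jobs = []
for job in status_data["results"]:
    if job["status"] == "skipped" and len(job["links"]) > 0:
        skipped_jobs.append(
            {
                "job_name": job["name"],
                "job_status": job["status"],
                "message": job["info"],
                "results_link": job["links"][0],
            }
        )

# Existing statuses (one illustrative row); the skipped job is appended to them.
job_statuses = pd.DataFrame(
    [{"job_name": "Stateless tests", "job_status": "success", "message": "", "results_link": ""}]
)
print(pd.concat([job_statuses, pd.DataFrame(skipped_jobs)], ignore_index=True))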
@@ -549,10 +567,16 @@ def get_build_report_links(
     build_report_links = {}
 
     for job in job_statuses.itertuples():
-        if job.job_name not in build_job_names or job.job_status != "success":
-            continue
-
-        build_report_links[job.job_name] = job.results_link
+        if (
+            job.job_name in build_job_names
+            and job.job_status
+            in (
+                "success",
+                "skipped",
+            )
+            and job.results_link
+        ):
+            build_report_links[job.job_name] = job.results_link
 
     if len(build_report_links) > 0:
         # Possible that only one build job succeeded, in which case we only have one link.
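The relaxed condition keeps a build job if it either succeeded or was skipped, but only when it still carries a results_link. An isolated check of that predicate, using illustrative job names and links rather than data from a real run:

import pandas as pd

build_job_names = {"Build amd64", "Build arm64", "Build freebsd"}  # illustrative

job_statuses = pd.DataFrame(
    [
        {"job_name": "Build amd64", "job_status": "success", "results_link": "https://example.com/amd64"},
        {"job_name": "Build arm64", "job_status": "skipped", "results_link": "https://example.com/arm64"},
        {"job_name": "Build freebsd", "job_status": "skipped", "results_link": ""},
    ]
)

build_report_links = {}
for job in job_statuses.itertuples():
    if (
        job.job_name in build_job_names
        and job.job_status in ("success", "skipped")
        and job.results_link
    ):
        build_report_links[job.job_name] = job.results_link

print(build_report_links)
# {'Build amd64': 'https://example.com/amd64', 'Build arm64': 'https://example.com/arm64'}
# The skipped job with no results_link is still excluded.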
@@ -594,6 +618,30 @@ def get_build_report_links(
     return build_report_links
 
 
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="Create a combined CI report.")
+    parser.add_argument(  # Need the full URL rather than just the ID to query the databases
+        "--actions-run-url", required=True, help="URL of the actions run"
+    )
+    parser.add_argument(
+        "--pr-number", help="Pull request number for the S3 path", type=int
+    )
+    parser.add_argument("--commit-sha", help="Commit SHA for the S3 path")
+    parser.add_argument(
+        "--no-upload", action="store_true", help="Do not upload the report"
+    )
+    parser.add_argument(
+        "--known-fails", type=str, help="Path to the file with known fails"
+    )
+    parser.add_argument(
+        "--cves", action="store_true", help="Get CVEs from Grype results"
+    )
+    parser.add_argument(
+        "--mark-preview", action="store_true", help="Mark the report as a preview"
+    )
+    return parser.parse_args()
+
+
 def create_workflow_report(
     actions_run_url: str,
     pr_number: int = None,
@@ -686,6 +734,10 @@ def create_workflow_report(
     except Exception as e:
         pr_info_html = e
 
+    fail_results["job_statuses"] = backfill_skipped_statuses(
+        fail_results["job_statuses"], pr_number, branch_name, commit_sha
+    )
+
     high_cve_count = 0
     if not cves_not_checked and len(fail_results["docker_images_cves"]) > 0:
         high_cve_count = (
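The backfill call added above builds its S3 lookup path from pr_number, branch_name, and commit_sha; the replace('=', 's/') step turns "PR=123" into "PRs/123" and "REF=<branch>" into "REFs/<branch>". A small standalone sketch of that path logic, where the bucket name and helper function are placeholders (S3_BUCKET is a constant defined elsewhere in the script):

S3_BUCKET = "my-ci-bucket"  # placeholder value; the real constant lives elsewhere in the module


def status_file_url(pr_number: int, branch: str, commit_sha: str) -> str:
    # Mirrors the path construction inside backfill_skipped_statuses.
    if pr_number == 0:
        ref_param, workflow_name = f"REF={branch}", "MasterCI"
    else:
        ref_param, workflow_name = f"PR={pr_number}", "PR"
    status_file = f"result_{workflow_name.lower()}.json"
    return f"https://{S3_BUCKET}.s3.amazonaws.com/{ref_param.replace('=', 's/')}/{commit_sha}/{status_file}"


print(status_file_url(123, "master", "88b6542"))
# https://my-ci-bucket.s3.amazonaws.com/PRs/123/88b6542/result_pr.json
print(status_file_url(0, "master", "88b6542"))
# https://my-ci-bucket.s3.amazonaws.com/REFs/master/88b6542/result_masterci.json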
