diff --git a/.github/workflows/build-daily.yml b/.github/workflows/build-daily.yml
index a79209cc..acd67dc6 100644
--- a/.github/workflows/build-daily.yml
+++ b/.github/workflows/build-daily.yml
@@ -9,6 +9,7 @@ on:
 
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 jobs:
   build-daily:
diff --git a/.github/workflows/build-on-pr.yml b/.github/workflows/build-on-pr.yml
index a3ec4636..5112e040 100644
--- a/.github/workflows/build-on-pr.yml
+++ b/.github/workflows/build-on-pr.yml
@@ -5,6 +5,7 @@ on:
 
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 jobs:
   build-pr:
diff --git a/.github/workflows/build-on-push.yml b/.github/workflows/build-on-push.yml
index beb50e03..f48b6705 100644
--- a/.github/workflows/build-on-push.yml
+++ b/.github/workflows/build-on-push.yml
@@ -6,6 +6,7 @@ on:
 
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 jobs:
   build-daily:
diff --git a/.github/workflows/debos.yml b/.github/workflows/debos.yml
index e18472e7..a73c60e8 100644
--- a/.github/workflows/debos.yml
+++ b/.github/workflows/debos.yml
@@ -11,6 +11,7 @@ on:
 # permissions to none
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 env:
   # github runs are only unique per repository and may also be re-run; create a
@@ -31,7 +32,6 @@ jobs:
     container:
       image: debian:trixie
       volumes:
-        - /srv/gh-runners/quic-yocto/builds:/fileserver-builds
        - /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
       options: --privileged
     steps:
@@ -95,30 +95,30 @@
       - name: Upload artifacts to fileserver space for builds
        id: upload_artifacts
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           set -ux
-          # curl will be used to talk to fileserver; should be installed by
-          # default
-          apt -y install curl
+          # python3-requests is used by publish_artifacts.py
+          apt -y install python3-requests
           # create a directory for the current run
-          dir="/fileserver-builds/${BUILD_ID}"
-          mkdir -vp "${dir}"
+          export BUILD_DIR="/tmp/${BUILD_ID}"
+          mkdir -vp "${BUILD_DIR}"
           # copy output files
-          cp -av rootfs.tar.gz "${dir}"
-          cp -av dtbs.tar.gz "${dir}"
-          cp -av disk-ufs.img.gz "${dir}"
-          cp -av disk-sdcard.img.gz "${dir}"
+          cp -av rootfs.tar.gz "${BUILD_DIR}"
+          cp -av dtbs.tar.gz "${BUILD_DIR}"
+          cp -av disk-ufs.img.gz "${BUILD_DIR}"
+          cp -av disk-sdcard.img.gz "${BUILD_DIR}"
           # TODO: separate flash_* directories between UFS and eMMC
-          tar -cvf "${dir}"/flash-ufs.tar.gz \
+          tar -cvf "${BUILD_DIR}"/flash-ufs.tar.gz \
             disk-ufs.img1 \
             disk-ufs.img2 \
             flash_rb3*
-          tar -cvf "${dir}"/flash-emmc.tar.gz \
+          tar -cvf "${BUILD_DIR}"/flash-emmc.tar.gz \
             disk-sdcard.img1 \
             disk-sdcard.img2 \
             flash_rb1*
 
           # instruct fileserver to publish this directory
-          url="${FILESERVER_URL}/${BUILD_ID}/"
-          curl -X POST -H 'Accept: text/event-stream' "${url}"
-          echo "url=\"${url}\"" >> $GITHUB_OUTPUT
-
+          export URL="${FILESERVER_URL}/${BUILD_ID}/"
+          .github/workflows/publish_artifacts.py
+          echo Image available at: ${URL}
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index 29090cc5..9499de5a 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -11,6 +11,7 @@ on:
 # permissions to none
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 env:
   # where results will be posted/hosted
@@ -33,7 +34,6 @@ jobs:
     container:
       image: debian:trixie
       volumes:
-        - /srv/gh-runners/quic-yocto/builds:/fileserver-builds
         - /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
     steps:
       - uses: actions/checkout@v4
@@ -64,19 +64,19 @@
           scripts/build-linux-deb.sh kernel-configs/systemd-boot.config
       - name: Upload results to fileserver
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           set -ux
           # dcmd from devscripts will be used to parse .changes file
           apt -y install --no-install-recommends devscripts
-          # curl will be used to talk to fileserver; should be installed by
-          # default
-          apt -y install curl
+          # python3-requests is used by publish_artifacts.py
+          apt -y install python3-requests
 
           # copy to fileserver builds and downloads directories
-          for dir in "/fileserver-builds/${BUILD_ID}" \
-                     "/fileserver-downloads/${BUILD_ID}"; do
-            mkdir -vp "${dir}"
-            cp -av `dcmd *.changes` "${dir}"
-          done
+          export BUILD_DIR="/fileserver-downloads/${BUILD_ID}"
+          mkdir -vp "${BUILD_DIR}"
+          cp -av `dcmd *.changes` "${BUILD_DIR}"
+
           # create or update linux-deb-latest symlink
           mkdir -vp /fileserver-downloads/qcom-deb-images
           (
@@ -90,6 +90,7 @@
           )
           # perhaps help NFS sync
           sync
           # instruct fileserver to publish this directory
-          url="${FILESERVER_URL}/${BUILD_ID}/"
-          curl -X POST -H 'Accept: text/event-stream' "${url}"
+          export URL="${FILESERVER_URL}/${BUILD_ID}/"
+          .github/workflows/publish_artifacts.py
+          echo Image available at: ${URL}
diff --git a/.github/workflows/publish_artifacts.py b/.github/workflows/publish_artifacts.py
new file mode 100755
index 00000000..2e5554a5
--- /dev/null
+++ b/.github/workflows/publish_artifacts.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+# Copyright (c) 2025 Qualcomm Innovation Center, Inc. All rights reserved.
+# SPDX-License-Identifier: BSD-3-Clause
+
+from multiprocessing import Pool
+import os
+import sys
+from time import sleep
+from typing import List
+
+import requests
+
+gh_token = os.environ["GITHUB_TOKEN"]
+num_threads_str = os.environ.get("UPLOAD_THREADS", "5")
+
+
+def upload_file(args):
+    """
+    Uploads a file to our file upload service. The service is a GCP CloudRun
+    project that returns signed URLs to Google Storage objects we can upload to.
+
+    args is a (url, base, name) tuple; returns (name, None) on success or
+    (name, error_message) on failure.
+    """
+    # Unpack outside the try block so `name` is always bound for the handler.
+    url, base, name = args
+    try:
+        headers = {
+            # NOTE(review): the standard HTTP header is "Authorization";
+            # confirm the upload service really expects "Authentication".
+            "Authentication": f"Bearer {gh_token}",
+        }
+
+        # Obtain the signed-url for GCS using Fibonacci backoff/retries
+        for x in (1, 2, 3, 5, 0):
+            r = requests.put(url, headers=headers, allow_redirects=False)
+            if r.ok:
+                # Stop retrying once we have the redirect with the signed URL;
+                # without this break we would request it 5 times.
+                break
+            correlation_id = r.headers.get("X-Correlation-ID", "?")
+            if not x:
+                return (
+                    name,
+                    f"Unable to get signed url HTTP_{r.status_code}. Correlation ID: {correlation_id} - {r.text}",
+                )
+            print(
+                f"Error getting signed URL for {name}: Correlation ID: {correlation_id} HTTP_{r.status_code} - {r.text}",
+                flush=True,
+            )
+            print(f"Retrying in {x} seconds", flush=True)
+            sleep(x)
+
+        # Upload the file to the signed URL with backoff/retry logic
+        url = r.headers["location"]
+        path = os.path.join(base, name)
+        for x in (1, 2, 3, 0):
+            # Re-open on every attempt so retries start from the beginning of
+            # the file; the context manager closes the handle promptly.
+            with open(path, "rb") as f:
+                r = requests.put(
+                    url,
+                    data=f,
+                    headers={"Content-type": "application/octet-stream"},
+                )
+            if r.ok:
+                # Stop on success; without this break the file would be
+                # uploaded again on every remaining iteration.
+                break
+            if not x:
+                return (
+                    name,
+                    f"Unable to upload content HTTP_{r.status_code} - {r.text}",
+                )
+            print(
+                f"Unable to upload content for {name}: HTTP_{r.status_code} - {r.text}"
+            )
+            print(f"Retrying in {x} seconds")
+            sleep(x)
+
+        return name, None
+    except Exception as e:
+        return name, str(e)
+
+
+def get_files_to_publish(path: str) -> List[str]:
+    """Return the paths of all files under `path`, relative to `path`."""
+    paths = []
+    for root, dirs, files in os.walk(path):
+        for file in files:
+            paths.append(os.path.join(root, file)[len(path) :])
+    return paths
+
+
+def main(num_threads: int, artifacts_dir: str, base_url: str):
+    """Upload every file under artifacts_dir to base_url with a worker pool."""
+    paths = get_files_to_publish(artifacts_dir)
+    print(f"= Found {len(paths)} files to publish", flush=True)
+
+    failed = False
+    work = [(f"{base_url}{x}", artifacts_dir, x) for x in paths]
+    with Pool(num_threads) as p:
+        results = p.imap_unordered(upload_file, work)
+        for i, res in enumerate(results):
+            name, err = res
+            print(f"= {i+1} of {len(work)} - {name}", flush=True)
+            if err:
+                print(f"|-> ERROR: {err}", flush=True)
+                failed = True
+
+    if failed:
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    BUILD_DIR = os.environ["BUILD_DIR"]
+    if BUILD_DIR[-1] != "/":
+        BUILD_DIR = BUILD_DIR + "/"
+
+    URL = os.environ["URL"]
+    if URL[-1] != "/":
+        URL = URL + "/"
+
+    num_threads = int(num_threads_str)
+    main(num_threads, BUILD_DIR, URL)
diff --git a/.github/workflows/u-boot.yml b/.github/workflows/u-boot.yml
index ccd47dd9..91bb80c0 100644
--- a/.github/workflows/u-boot.yml
+++ b/.github/workflows/u-boot.yml
@@ -8,6 +8,7 @@ on:
 # permissions to none
 permissions:
   contents: read
+  security-events: read # This is required to handle authentication to our artifact publishing API
 
 env:
   # where results will be posted/hosted
@@ -30,7 +31,6 @@ jobs:
     container:
       image: debian:trixie
       volumes:
-        - /srv/gh-runners/quic-yocto/builds:/fileserver-builds
         - /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
     steps:
       - uses: actions/checkout@v4
@@ -55,25 +55,25 @@
           scripts/build-u-boot-rb1.sh
       - name: Upload results to fileserver
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           set -ux
-          # curl will be used to talk to fileserver; should be installed by
-          # default
-          apt -y install curl
-          for dir in "/fileserver-builds/${BUILD_ID}" \
-                     "/fileserver-downloads/qcom-deb-images/u-boot-rb1-latest"; do
-            mkdir -vp "${dir}"
-            cp -av \
-              u-boot/u-boot-nodtb.bin.gz \
-              u-boot/dts/upstream/src/arm64/qcom/qrb2210-rb1.dtb \
-              u-boot/u-boot-nodtb.bin.gz-dtb \
-              u-boot/u-boot.bin \
-              u-boot/rb1-boot.img \
-              "${dir}"
-          done
+          # python3-requests is used by publish_artifacts.py
+          apt -y install python3-requests
+
+          export BUILD_DIR="/fileserver-downloads/qcom-deb-images/u-boot-rb1-latest"
+          mkdir -vp ${BUILD_DIR}
+          cp -av \
+            u-boot/u-boot-nodtb.bin.gz \
+            u-boot/dts/upstream/src/arm64/qcom/qrb2210-rb1.dtb \
+            u-boot/u-boot-nodtb.bin.gz-dtb \
+            u-boot/u-boot.bin \
+            u-boot/rb1-boot.img \
+            ${BUILD_DIR}
 
           # perhaps help NFS sync
           sync
           # instruct fileserver to publish this directory
-          url="${FILESERVER_URL}/${BUILD_ID}/"
-          curl -X POST -H 'Accept: text/event-stream' "${url}"
-
+          export URL="${FILESERVER_URL}/${BUILD_ID}/"
+          .github/workflows/publish_artifacts.py
+          echo Image available at: ${URL}