1 change: 1 addition & 0 deletions .github/workflows/build-daily.yml
@@ -9,6 +9,7 @@ on:

permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

jobs:
build-daily:
1 change: 1 addition & 0 deletions .github/workflows/build-on-pr.yml
@@ -5,6 +5,7 @@ on:

permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

jobs:
build-pr:
1 change: 1 addition & 0 deletions .github/workflows/build-on-push.yml
@@ -6,6 +6,7 @@ on:

permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

jobs:
build-daily:
32 changes: 16 additions & 16 deletions .github/workflows/debos.yml
@@ -11,6 +11,7 @@ on:
# permissions to none
permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

env:
# github runs are only unique per repository and may also be re-run; create a
@@ -31,7 +32,6 @@ jobs:
container:
image: debian:trixie
volumes:
- /srv/gh-runners/quic-yocto/builds:/fileserver-builds
- /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
options: --privileged
steps:
@@ -95,30 +95,30 @@ jobs:

- name: Upload artifacts to fileserver space for builds
id: upload_artifacts
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -ux
# curl will be used to talk to fileserver; should be installed by
# default
apt -y install curl
# python3-requests is used by publish_artifacts.py
apt -y install python3-requests
# create a directory for the current run
dir="/fileserver-builds/${BUILD_ID}"
Contributor:
I guess we can drop this volume now:
- /srv/gh-runners/quic-yocto/builds:/fileserver-builds

Contributor Author:
good catch. should be addressed in force push.

mkdir -vp "${dir}"
export BUILD_DIR="/tmp/${BUILD_ID}"
mkdir -vp "${BUILD_DIR}"
# copy output files
cp -av rootfs.tar.gz "${dir}"
cp -av dtbs.tar.gz "${dir}"
cp -av disk-ufs.img.gz "${dir}"
cp -av disk-sdcard.img.gz "${dir}"
cp -av rootfs.tar.gz "${BUILD_DIR}"
cp -av dtbs.tar.gz "${BUILD_DIR}"
cp -av disk-ufs.img.gz "${BUILD_DIR}"
cp -av disk-sdcard.img.gz "${BUILD_DIR}"
# TODO: separate flash_* directories between UFS and eMMC
tar -cvf "${dir}"/flash-ufs.tar.gz \
tar -cvf "${BUILD_DIR}"/flash-ufs.tar.gz \
disk-ufs.img1 \
disk-ufs.img2 \
flash_rb3*
tar -cvf "${dir}"/flash-emmc.tar.gz \
tar -cvf "${BUILD_DIR}"/flash-emmc.tar.gz \
disk-sdcard.img1 \
disk-sdcard.img2 \
flash_rb1*
# instruct fileserver to publish this directory
url="${FILESERVER_URL}/${BUILD_ID}/"
curl -X POST -H 'Accept: text/event-stream' "${url}"
echo "url=\"${url}\"" >> $GITHUB_OUTPUT

export URL="${FILESERVER_URL}/${BUILD_ID}/"
.github/workflows/publish_artifacts.py
echo Image available at: ${URL}
23 changes: 12 additions & 11 deletions .github/workflows/linux.yml
@@ -11,6 +11,7 @@ on:
# permissions to none
permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

env:
# where results will be posted/hosted
@@ -33,7 +34,6 @@ jobs:
container:
image: debian:trixie
volumes:
- /srv/gh-runners/quic-yocto/builds:/fileserver-builds
- /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
steps:
- uses: actions/checkout@v4
@@ -64,19 +64,19 @@ jobs:
scripts/build-linux-deb.sh kernel-configs/systemd-boot.config

- name: Upload results to fileserver
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -ux
# dcmd from devscripts will be used to parse .changes file
apt -y install --no-install-recommends devscripts
# curl will be used to talk to fileserver; should be installed by
# default
apt -y install curl
# python3-requests is used by publish_artifacts.py
apt -y install python3-requests
# copy to fileserver builds and downloads directories
for dir in "/fileserver-builds/${BUILD_ID}" \
"/fileserver-downloads/${BUILD_ID}"; do
mkdir -vp "${dir}"
cp -av `dcmd *.changes` "${dir}"
done
export BUILD_DIR="/fileserver-downloads/${BUILD_ID}"
mkdir -vp "${BUILD_DIR}"
cp -av `dcmd *.changes` "${BUILD_DIR}"

# create or update linux-deb-latest symlink
mkdir -vp /fileserver-downloads/qcom-deb-images
(
@@ -90,6 +90,7 @@ jobs:
# perhaps help NFS sync
sync
# instruct fileserver to publish this directory
url="${FILESERVER_URL}/${BUILD_ID}/"
curl -X POST -H 'Accept: text/event-stream' "${url}"
export URL="${FILESERVER_URL}/${BUILD_ID}/"
.github/workflows/publish_artifacts.py
echo Image available at: ${URL}

111 changes: 111 additions & 0 deletions .github/workflows/publish_artifacts.py
@@ -0,0 +1,111 @@
#!/usr/bin/env python3
# Copyright (c) 2025 Qualcomm Innovation Center, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause

from multiprocessing import Pool
import os
import sys
from time import sleep
from typing import List

import requests

gh_token = os.environ["GITHUB_TOKEN"]
num_threads_str = os.environ.get("UPLOAD_THREADS", "5")


def upload_file(args):
    """
    Uploads a file to our file upload service. The service is a GCP CloudRun
    project that returns signed URLs to Google Storage objects we can upload to.
    """
    try:
        url, base, name = args

        headers = {
            "Authentication": f"Bearer {gh_token}",
        }

        # Obtain the signed-url for GCS using Fibonacci backoff/retries
        for x in (1, 2, 3, 5, 0):
            r = requests.put(url, headers=headers, allow_redirects=False)
Contributor:
I don't understand why we need two put requests; IIUC, we get redirected by the web service to the actual place where we're supposed to upload? Perhaps this scheme should be captured in a comment in the file?

Contributor Author:
i've added a docstring in the force-push

            if not r.ok:
                correlation_id = r.headers.get("X-Correlation-ID", "?")
                if not x:
                    return (
                        name,
                        f"Unable to get signed url HTTP_{r.status_code}. Correlation ID: {correlation_id} - {r.text}",
                    )
                else:
                    print(
                        f"Error getting signed URL for {name}: Correlation ID: {correlation_id} HTTP_{r.status_code} - {r.text}",
                        flush=True,
                    )
                    print(f"Retrying in {x} seconds", flush=True)
                    sleep(x)

        # Upload the file to the signed URL with backoff/retry logic
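        # The service does not accept the file body directly: the PUT above
        # (sent with allow_redirects=False) is answered with the signed Google
        # Storage URL in the Location header, and the PUT below then streams
        # the file contents to that signed URL.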
        url = r.headers["location"]
        path = os.path.join(base, name)
        for x in (1, 2, 3, 0):
            r = requests.put(
                url,
                data=open(path, "rb"),
                headers={"Content-type": "application/octet-stream"},
            )
            if not r.ok:
                if not x:
                    return (
                        name,
                        f"Unable to upload content HTTP_{r.status_code} - {r.text}",
                    )
                else:
                    print(
                        f"Unable to upload content for {name}: HTTP_{r.status_code} - {r.text}"
                    )
                    print(f"Retrying in {x} seconds")
                    sleep(x)

        return name, None
    except Exception as e:
        return name, str(e)


def get_files_to_publish(path: str) -> List[str]:
    paths = []
    for root, dirs, files in os.walk(path):
        for file in files:
            paths.append(os.path.join(root, file)[len(path) :])
    return paths


def main(num_threads: int, artifacts_dir: str, base_url: str):
    paths = get_files_to_publish(artifacts_dir)
    print(f"= Found {len(paths)} files to publish", flush=True)

    failed = False
    work = [(f"{base_url}{x}", artifacts_dir, x) for x in paths]
    with Pool(num_threads) as p:
        results = p.imap_unordered(upload_file, work)
        for i, res in enumerate(results):
            name, err = res
            print(f"= {i+1} of {len(work)} - {name}", flush=True)
            if err:
                print(f"|-> ERROR: {err}", flush=True)
                failed = True

    if failed:
        sys.exit(1)


if __name__ == "__main__":
    BUILD_DIR = os.environ["BUILD_DIR"]
    if BUILD_DIR[-1] != "/":
        BUILD_DIR = BUILD_DIR + "/"

    URL = os.environ["URL"]
    if URL[-1] != "/":
        URL = URL + "/"

    num_threads = int(num_threads_str)
    main(num_threads, BUILD_DIR, URL)
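
For local testing, the following is a minimal invocation sketch of the new script outside of GitHub Actions; the token, directory, and URL values are illustrative placeholders, not values used by these workflows:

#!/usr/bin/env python3
# Illustrative local dry-run wrapper for publish_artifacts.py; placeholder values only.
import os
import subprocess

env = dict(
    os.environ,
    # The workflows pass secrets.GITHUB_TOKEN; this placeholder will be
    # rejected by the publishing service.
    GITHUB_TOKEN="<github-token>",
    # Directory tree to publish; the script appends a trailing "/" if missing.
    BUILD_DIR="/tmp/example-build",
    # Base URL to publish under (placeholder host); trailing "/" is also added.
    URL="https://fileserver.example.org/example-build",
    # Optional; the script defaults to 5 upload processes.
    UPLOAD_THREADS="5",
)
# Run from the repository root so the relative script path resolves.
subprocess.run([".github/workflows/publish_artifacts.py"], env=env, check=True)
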
36 changes: 18 additions & 18 deletions .github/workflows/u-boot.yml
@@ -8,6 +8,7 @@ on:
# permissions to none
permissions:
contents: read
security-events: read # This is required to handle authentication to our artifact publishing API

env:
# where results will be posted/hosted
@@ -30,7 +31,6 @@ jobs:
container:
image: debian:trixie
volumes:
- /srv/gh-runners/quic-yocto/builds:/fileserver-builds
- /srv/gh-runners/quic-yocto/downloads:/fileserver-downloads
steps:
- uses: actions/checkout@v4
@@ -55,25 +55,25 @@ jobs:
scripts/build-u-boot-rb1.sh

- name: Upload results to fileserver
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -ux
# curl will be used to talk to fileserver; should be installed by
# default
apt -y install curl
for dir in "/fileserver-builds/${BUILD_ID}" \
"/fileserver-downloads/qcom-deb-images/u-boot-rb1-latest"; do
mkdir -vp "${dir}"
cp -av \
u-boot/u-boot-nodtb.bin.gz \
u-boot/dts/upstream/src/arm64/qcom/qrb2210-rb1.dtb \
u-boot/u-boot-nodtb.bin.gz-dtb \
u-boot/u-boot.bin \
u-boot/rb1-boot.img \
"${dir}"
done
# python3-requests is used by publish_artifacts.py
apt -y install python3-requests

export BUILD_DIR="/fileserver-downloads/qcom-deb-images/u-boot-rb1-latest"
mkdir -vp ${BUILD_DIR}
cp -av \
u-boot/u-boot-nodtb.bin.gz \
u-boot/dts/upstream/src/arm64/qcom/qrb2210-rb1.dtb \
u-boot/u-boot-nodtb.bin.gz-dtb \
u-boot/u-boot.bin \
u-boot/rb1-boot.img \
${BUILD_DIR}
# perhaps help NFS sync
sync
# instruct fileserver to publish this directory
url="${FILESERVER_URL}/${BUILD_ID}/"
curl -X POST -H 'Accept: text/event-stream' "${url}"

export URL="${FILESERVER_URL}/${BUILD_ID}/"
.github/workflows/publish_artifacts.py
echo Image available at: ${URL}