Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 65 additions & 10 deletions .github/workflows/debos.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,13 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

env:
# image build id; used for SBOM generation; TODO: should be used in image metadata too
BUILD_ID: ${{ github.run_id }}-${{ github.run_attempt }}

jobs:
build-debos:
name: Build and upload debos recipes
outputs:
url: ${{ steps.upload_artifacts.outputs.url }}
runs-on: [self-hosted, qcom-u2404, arm64]
Expand Down Expand Up @@ -86,23 +91,23 @@ jobs:
debos -t u_boot_rb1:rb1-boot.img \
debos-recipes/qualcomm-linux-debian-flash.yaml

- name: Stage build artifacts for publishing
- name: Stage debos artifacts for publishing
run: |
set -ux
# create a directory for the current run
BUILD_DIR="./uploads"
mkdir -vp "${BUILD_DIR}"
dir="debos-artifacts"
mkdir -v "${dir}"
# copy output files
cp -av rootfs.tar.gz "${BUILD_DIR}"
cp -av dtbs.tar.gz "${BUILD_DIR}"
cp -av disk-ufs.img.gz "${BUILD_DIR}"
cp -av disk-sdcard.img.gz "${BUILD_DIR}"
cp -av rootfs.tar.gz "${dir}"
cp -av dtbs.tar.gz "${dir}"
cp -av disk-ufs.img.gz "${dir}"
cp -av disk-sdcard.img.gz "${dir}"
# TODO: separate flash_* directories between UFS and eMMC
tar -cvf "${BUILD_DIR}"/flash-ufs.tar.gz \
tar -cvf "${dir}"/flash-ufs.tar.gz \
disk-ufs.img1 \
disk-ufs.img2 \
flash_rb3*
tar -cvf "${BUILD_DIR}"/flash-emmc.tar.gz \
tar -cvf "${dir}"/flash-emmc.tar.gz \
disk-sdcard.img1 \
disk-sdcard.img2 \
flash_rb1*
Expand All @@ -111,4 +116,54 @@ jobs:
uses: qualcomm-linux/upload-private-artifact-action@v1
id: upload_artifacts
with:
path: ./uploads
path: debos-artifacts

- name: Unpack rootfs to generate SBOM
run: mkdir -v rootfs && tar -C rootfs -xf rootfs.tar.gz

      # Syft is not packaged in Debian; it's available as a binary tarball or
      # as a container image from upstream; it's available on arm64 and x86
- name: Install Syft
run: |
set -ux
apt -y install curl
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh

- name: Generate SBOMs with Syft
run: |
set -ux
bin/syft --version
SYFT_FORMAT_PRETTY=true bin/syft \
-o cyclonedx-json=rootfs-sbom.cyclonedx.json \
-o spdx-json=rootfs-sbom.spdx.json \
-o syft-json=rootfs-sbom.syft.json \
-o syft-text=rootfs-sbom.syft.txt \
-o syft-table \
--parallelism `nproc` \
--select-catalogers debian \
--source-name qualcomm-linux-debian-rootfs \
--source-version "${BUILD_ID}" \
-v \
scan rootfs

- name: Generate license summary from Syft report
run: |
set -ux
scripts/syft-license-summary.py \
--rootfs rootfs rootfs-sbom.syft.json |
tee rootfs-sbom.syft-license-summary.csv.txt

- name: Stage SBOMs for publishing
run: |
set -ux
gzip rootfs-sbom.*
dir="sboms"
mkdir -v sboms
cp -av rootfs-sbom.*.gz sboms

- name: Upload SBOMs as private artifacts
uses: qualcomm-linux/upload-private-artifact-action@v1
id: upload_sbom_artifacts
with:
path: sboms

Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Static analysis of shell scripts with ShellCheck
name: Static analysis of scripts

on:
# run on pull requests to the main branch
Expand All @@ -19,6 +19,34 @@ concurrency:
cancel-in-progress: true

jobs:
flake8:
name: Install and run Flake8 on Python scripts
runs-on: ubuntu-latest
steps:
- name: Install flake8
run: sudo apt update && sudo apt -y install flake8

- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Run Flake8
run: flake8 scripts/*.py

pylint:
name: Install and run Pylint on Python scripts
runs-on: ubuntu-latest
steps:
- name: Install Pylint
run: sudo apt update && sudo apt -y install pylint

- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Run Pylint (error mode)
run: pylint --errors-only scripts/*.py

shellcheck:
name: Install and run ShellCheck on shell scripts
runs-on: ubuntu-latest
Expand All @@ -31,5 +59,5 @@ jobs:
fetch-depth: 0

- name: Run ShellCheck
run: shellcheck scripts/*
run: shellcheck scripts/*.sh

77 changes: 77 additions & 0 deletions scripts/syft-license-summary.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
#!/usr/bin/env python3
# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
# SPDX-License-Identifier: BSD-3-Clause

# input is a Syft JSON file as the first argument; output is a
# human-readable summary of source packages and their licenses in CSV
# format

import argparse
import csv
import hashlib
import json
import os
import sys
from collections import defaultdict


def load_syft_json(file_path):
    """Read the Syft JSON report at *file_path* and return the parsed dict."""
    with open(file_path, 'r') as fp:
        contents = fp.read()
    return json.loads(contents)


def sha256_of_file(path):
    """Return the SHA-256 hex digest of the file at *path*.

    Reads the file in fixed-size chunks so large copyright files do not
    need to fit in memory at once.  Returns the sentinel string
    "unreadable" when the file cannot be opened or read (missing,
    permission denied, broken symlink, ...), which is what print_table()
    folds into the hash column.
    """
    digest = hashlib.sha256()
    try:
        with open(path, 'rb') as f:
            # 64 KiB chunks instead of slurping the whole file at once
            for chunk in iter(lambda: f.read(65536), b''):
                digest.update(chunk)
    except OSError:
        # narrowed from the original bare `except Exception` so genuine
        # programming errors are no longer silently masked as "unreadable"
        return "unreadable"
    return digest.hexdigest()


def group_by_source_package(data):
    """Fold a parsed Syft report into a per-source-package summary.

    Returns a mapping of source package name -> dict with:
      binaries       -- set of binary package names built from that source
      licenses       -- set of license strings seen on those binaries
      copyrights     -- binary package name -> path of its copyright file
      source_version -- source version string (last artifact seen wins)
    """
    summary = defaultdict(lambda: {
        "binaries": set(),
        "licenses": set(),
        "copyrights": {},
        "source_version": None
    })
    for entry in data.get("artifacts", []):
        meta = entry.get("metadata", {})
        pkg = meta.get("package", "unknown")
        # a binary package with no explicit source is its own source
        src = meta.get("source") or pkg
        src_version = meta.get("sourceVersion") or meta.get("version", "")
        record = summary[src]
        record["binaries"].add(pkg)
        record["source_version"] = src_version
        record["licenses"].update(
            lic.get("value", "unknown") for lic in entry.get("licenses", []))
        for location in entry.get("locations", []):
            loc_path = location.get("path", "")
            if "copyright" in loc_path:
                record["copyrights"][pkg] = loc_path
    return summary


def print_table(grouped, rootfs_path):
    """Write the per-source-package summary to stdout as CSV.

    Columns: source, version, binaries (space-separated), licenses
    (space-separated), copyright_sha256 (space-separated digests of the
    distinct copyright files, resolved under *rootfs_path*).
    """
    # csv.writer handles quoting, so fields that contain commas or quotes
    # (e.g. multi-license strings) no longer corrupt the CSV output;
    # lineterminator matches print()'s "\n" so clean rows are byte-identical
    writer = csv.writer(sys.stdout, lineterminator="\n")
    writer.writerow(
        ["source", "version", "binaries", "licenses", "copyright_sha256"])
    for source, data in grouped.items():
        binaries = " ".join(sorted(data["binaries"]))
        licenses = " ".join(sorted(data["licenses"]))
        version = data["source_version"] or "unknown"
        hashes = set()
        for path in data["copyrights"].values():
            # copyright paths are absolute within the image; strip the
            # leading '/' so os.path.join resolves them under rootfs_path
            full_path = os.path.join(rootfs_path, path.lstrip('/'))
            hashes.add(sha256_of_file(full_path))
        writer.writerow([source, version, binaries, licenses,
                         " ".join(sorted(hashes))])


if __name__ == "__main__":
    # CLI entry point: summarize a Syft report against an unpacked rootfs.
    arg_parser = argparse.ArgumentParser(
        description="Summarize Syft license data.")
    arg_parser.add_argument("syft_json", help="Path to the Syft JSON file")
    arg_parser.add_argument("--rootfs", required=True,
                            help="Base path to the root filesystem")
    cli_args = arg_parser.parse_args()

    report = load_syft_json(cli_args.syft_json)
    summary = group_by_source_package(report)
    print_table(summary, cli_args.rootfs)
Loading