# NOTE(review): the following header lines are GitHub web-page chrome captured
# when this workflow was copied from the Actions run view; kept as comments so
# the file parses as YAML.
#
# Skip to content
# Phase 1.2: Build Tooling Modernization #254
# Phase 1.2: Build Tooling Modernization
# Phase 1.2: Build Tooling Modernization #254
# Workflow file for this run
# Workflow name shown in the GitHub Actions UI.
name: wheels-docker

# NOTE: generic YAML 1.1 parsers read the bare `on` key as boolean true;
# GitHub's own loader handles it correctly (suppress yamllint `truthy` here).
on:
  # Build wheels on feature branches and PRs (test only)
  push:
    branches: ["**"]
    tags:
      - 'v*'
  pull_request:
    branches: [master]
  # Publish to GitHub Releases when merged to master
  # Publish to PyPI when tagged
  workflow_dispatch:

env:
  # Registry for caching build images
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}/wheel-builder
jobs:
  # Resolves base/PR repo+branch identifiers via a reusable workflow.
  identifiers:
    # GitHub needs to know where .cicd/workflows/identifiers.yml lives at parse time,
    # and submodules aren't included in that context! thus the following does NOT work:
    #   uses: ./.cicd/workflows/identifiers.yml
    # we MUST reference the remote repo directly:
    uses: wamp-proto/wamp-cicd/.github/workflows/identifiers.yml@main
    # IMPORTANT: we still need .cicd as a Git submodule in the using repo though!
    # because e.g. identifiers.yml wants to access scripts/sanitize.sh !
build-wheels:
name: Build wheels (${{ matrix.target.name }})
needs: identifiers
runs-on: ubuntu-latest
container: ${{ matrix.target.base_image }}
env:
BASE_REPO: ${{ needs.identifiers.outputs.base_repo }}
BASE_BRANCH: ${{ needs.identifiers.outputs.base_branch }}
PR_NUMBER: ${{ needs.identifiers.outputs.pr_number }}
PR_REPO: ${{ needs.identifiers.outputs.pr_repo }}
PR_BRANCH: ${{ needs.identifiers.outputs.pr_branch }}
strategy:
fail-fast: false
matrix:
target:
# manylinux_2_34 (glibc 2.34+) - PEP 600 compliant for modern Linux
# see: https://github.com/pypa/manylinux
- name: "manylinux_2_34_x86_64"
base_image: "quay.io/pypa/manylinux_2_34_x86_64"
# Future manylinux images can be added here:
# - name: "manylinux_2_34_aarch64"
# base_image: "quay.io/pypa/manylinux_2_34_aarch64"
# Deactivated for now - focusing on standard manylinux wheels:
# - name: "debian12-amd64"
# base_image: "debian:12"
# - name: "rocky9-amd64"
# base_image: "rockylinux:9"
# - name: "ubuntu2404-amd64"
# base_image: "ubuntu:24.04"
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install system dependencies
run: |
# manylinux images use yum/dnf and come with many build tools pre-installed
if command -v yum >/dev/null 2>&1; then
# manylinux images (CentOS-based)
yum update -y
yum install -y \
curl \
git \
openssl-devel \
libffi-devel \
zlib-devel \
bzip2-devel \
readline-devel \
sqlite-devel \
ncurses-devel
# Note: snappy-devel may not be available in manylinux base images
yum install -y snappy-devel || echo "snappy-devel not available, skipping"
elif command -v dnf >/dev/null 2>&1; then
# Newer manylinux images might use dnf
dnf update -y
dnf install -y \
curl \
git \
openssl-devel \
libffi-devel \
zlib-devel \
bzip2-devel \
readline-devel \
sqlite-devel \
ncurses-devel
dnf install -y snappy-devel || echo "snappy-devel not available, skipping"
fi
- name: Setup Python environment
run: |
# manylinux images come with multiple Python versions pre-installed in /opt/python/
echo "==> Available Python versions:"
ls -la /opt/python/*/bin/python* 2>/dev/null || echo "No /opt/python found"
# Add all Python versions to PATH for uv/just to discover
for pyver in /opt/python/*/bin; do
if [ -d "$pyver" ]; then
echo "Adding $pyver to PATH"
export PATH="$pyver:$PATH"
fi
done
# Also ensure we have a working python3 symlink
which python3 || ln -sf $(find /opt/python -name python3 | head -1) /usr/local/bin/python3
echo ""
echo "==> Current Python version:"
python3 --version
echo "==> pip version:"
python3 -m pip --version
# Save the updated PATH for subsequent steps
echo "PATH=$PATH" >> $GITHUB_ENV
- name: Install Just
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- --to /usr/local/bin
just --version
- name: Install uv
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl -LsSf https://astral.sh/uv/install.sh | sh
# Add to both GITHUB_PATH and current environment
echo "/root/.cargo/bin" >> $GITHUB_PATH
echo "PATH=/root/.cargo/bin:$PATH" >> $GITHUB_ENV
export PATH="/root/.cargo/bin:$PATH"
uv --version
- name: Install Rust
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable
# Source the Rust environment (as recommended by installer)
source "$HOME/.cargo/env"
# Add to both GITHUB_PATH and current environment
echo "/root/.cargo/bin" >> $GITHUB_PATH
echo "PATH=/root/.cargo/bin:$PATH" >> $GITHUB_ENV
export PATH="/root/.cargo/bin:$PATH"
rustc --version
- name: Verify toolchain
run: |
# Source Rust environment and ensure PATH is set
source "$HOME/.cargo/env" 2>/dev/null || true
export PATH="/root/.cargo/bin:$PATH"
echo "==> Build environment summary:"
echo "Container: ${{ matrix.target.base_image }}"
echo "Just: $(just --version)"
echo "uv: $(uv --version)"
echo "Rust: $(rustc --version)"
echo "Python: $(python3 --version)"
echo "GCC: $(gcc --version | head -1)"
echo "glibc: $(ldd --version 2>/dev/null | head -1 || echo 'N/A')"
- name: Build manylinux wheels with NVX native extension
env:
AUTOBAHN_USE_NVX: 1
run: |
# Source Rust environment and ensure tools are in PATH
source "$HOME/.cargo/env" 2>/dev/null || true
export PATH="/root/.cargo/bin:$PATH"
# Verify environment
echo "==> Environment verification:"
echo "AUTOBAHN_USE_NVX=$AUTOBAHN_USE_NVX"
echo "PATH=$PATH"
echo "Python: $(python3 --version)"
echo "uv: $(uv --version)"
echo "just: $(just --version)"
echo "rustc: $(rustc --version)"
echo "auditwheel: $(auditwheel --version || echo 'not available')"
# Build binary wheels WITH NVX acceleration for manylinux
just build-all
command -v auditwheel >/dev/null || { echo "auditwheel missing, aborting"; exit 1; }
mkdir -p wheelhouse
# Convert linux_x86_64 wheels to multi-platform tag (incl. manylinux_2_34_x86_64) using auditwheel
echo ""
echo "==> Converting wheels to multi-platform tag (incl. manylinux_2_34_x86_64) format..."
for wheel in dist/*.whl; do
if [[ "$wheel" == *"linux_x86_64"* ]]; then
echo "Converting: $(basename $wheel)"
# show autodetected set of platform tags
auditwheel show "$wheel"
# fix/rename wheel based on autodetected platform tags and store in wheelhouse/
# -> auditwheel will encode all (!) supported tags into the wheel filename
# -> Python packaging ecosystem allows wheels to carry multiple platform tags
# -> pip will then pick the most specific tag that works for the current system
# -> so we do not need to manually set: --plat manylinux_2_34_x86_64 as in:
# auditwheel repair "$wheel" --plat manylinux_2_34_x86_64 -w wheelhouse/
auditwheel repair "$wheel" -w wheelhouse/
else
echo "Copying non-linux wheel: $(basename $wheel)"
cp "$wheel" wheelhouse/
fi
done
echo ""
echo "==> Final wheel inventory after manylinux conversion:"
echo ""
ls -la wheelhouse/
for wheel in wheelhouse/*.whl; do
auditwheel show "$wheel"
done
echo ""
echo " Note: auditwheel adds multiple manylinux tags into the filename."
echo " This is expected and pip will resolve correctly."
echo " Do NOT try to simplify by renaming."
echo ""
- name: Force file system sync (post-build, pre-validation)
run: |
echo "======================================================================"
echo "==> Forcing File System Sync (Post-Build)"
echo "======================================================================"
echo ""
echo "Flushing all file system buffers to disk to ensure wheels are fully"
echo "written before validation and checksumming."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Validate wheels integrity
run: |
set -o pipefail
echo "======================================================================"
echo "==> Validating Wheel Integrity (Fail Fast)"
echo "======================================================================"
echo ""
echo "Installing twine for validation..."
# Ensure pip is available for the Python being used
python3 -m ensurepip --upgrade 2>/dev/null || true
# Install both packaging and twine from master for PEP 639 (Core Metadata 2.4) support
# Note: No --break-system-packages needed in containers (isolated environment)
python3 -m pip install git+https://github.com/pypa/packaging.git
python3 -m pip install git+https://github.com/pypa/twine.git
echo ""
echo "==> Validation environment:"
echo "Python: $(python3 --version)"
echo "setuptools: $(python3 -m pip show setuptools | grep '^Version:' || echo 'not installed')"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:' || echo 'not installed')"
echo "twine: $(twine --version)"
echo ""
# Initialize validation output file
VALIDATION_FILE="wheelhouse/VALIDATION.txt"
echo "Wheel Validation Results - Build Time" > "$VALIDATION_FILE"
echo "======================================" >> "$VALIDATION_FILE"
echo "" >> "$VALIDATION_FILE"
echo "Validation Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> "$VALIDATION_FILE"
echo "Python: $(python3 --version)" >> "$VALIDATION_FILE"
echo "twine: $(twine --version)" >> "$VALIDATION_FILE"
echo "" >> "$VALIDATION_FILE"
HAS_ERRORS=0
for wheel in wheelhouse/*.whl; do
if [ ! -f "$wheel" ]; then
echo "⚠️ No wheels found in wheelhouse/"
HAS_ERRORS=1
continue
fi
WHEEL_NAME=$(basename "$wheel")
echo "==> Validating: $WHEEL_NAME"
echo "" >> "$VALIDATION_FILE"
echo "Wheel: $WHEEL_NAME" >> "$VALIDATION_FILE"
echo "---" >> "$VALIDATION_FILE"
# Test 1: Can unzip read the wheel?
echo " [1/3] ZIP integrity test..."
if unzip -t "$wheel" > /dev/null 2>&1; then
echo " ✅ ZIP test PASS"
echo " ZIP test: PASS" >> "$VALIDATION_FILE"
else
echo " ❌ ZIP test FAIL - wheel is corrupted!"
echo " This wheel cannot be unzipped and is unusable."
echo " ZIP test: FAIL - wheel is corrupted!" >> "$VALIDATION_FILE"
HAS_ERRORS=1
fi
# Test 2: Python zipfile module validation
echo " [2/3] Python zipfile test..."
if python3 -m zipfile -t "$wheel" > /dev/null 2>&1; then
echo " ✅ Python zipfile test PASS"
echo " Python zipfile test: PASS" >> "$VALIDATION_FILE"
else
echo " ❌ Python zipfile test FAIL - wheel is corrupted!"
echo " Python zipfile test: FAIL - wheel is corrupted!" >> "$VALIDATION_FILE"
HAS_ERRORS=1
fi
# Test 3: twine check (validates wheel metadata and structure)
echo " [3/3] Twine validation..."
twine check "$wheel" 2>&1 | tee /tmp/twine_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_output.txt; then
echo " ✅ Twine check PASS"
echo " Twine check: PASS" >> "$VALIDATION_FILE"
else
echo " ❌ Twine check FAIL"
cat /tmp/twine_output.txt
echo " Twine check: FAIL" >> "$VALIDATION_FILE"
cat /tmp/twine_output.txt >> "$VALIDATION_FILE"
HAS_ERRORS=1
fi
rm -f /tmp/twine_output.txt
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "" >> "$VALIDATION_FILE"
echo "RESULT: VALIDATION FAILED" >> "$VALIDATION_FILE"
echo "======================================================================"
echo "❌ WHEEL VALIDATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed integrity checks."
echo "This indicates a build or packaging problem."
echo ""
echo "DO NOT PROCEED - corrupted wheels must NOT become artifacts!"
echo ""
exit 1
else
echo "" >> "$VALIDATION_FILE"
echo "RESULT: ALL VALIDATIONS PASSED" >> "$VALIDATION_FILE"
echo "======================================================================"
echo "✅ All wheels validated successfully"
echo "======================================================================"
echo ""
echo "All integrity checks passed. Wheels are valid and ready for upload."
echo ""
echo "Validation results written to: $VALIDATION_FILE"
fi
- name: Generate SHA256 checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Generating SHA256 Checksums for Chain of Custody"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
# Force sync before checksumming to ensure files are on disk
echo "Forcing sync before checksumming..."
sync
echo "✅ Buffers flushed"
echo ""
# Change to wheelhouse directory to generate relative paths (basename only)
cd wheelhouse
CHECKSUM_FILE="CHECKSUMS.sha256"
# Generate checksums for all wheels (using basename only)
echo "Generating checksums for wheels..."
for wheel in *.whl; do
if [ -f "$wheel" ]; then
# Sync before each checksum to ensure file is on disk
sync
openssl sha256 "$wheel" | tee -a "$CHECKSUM_FILE"
fi
done
echo ""
echo "==> Generated checksum file:"
cat "$CHECKSUM_FILE"
echo ""
echo "This file will be uploaded with artifacts to verify integrity"
echo "when artifacts are downloaded by the release workflow."
echo ""
# Return to project root
cd ..
- name: Force file system sync (post-checksum, pre-upload)
run: |
echo "======================================================================"
echo "==> Forcing File System Sync (Post-Checksum)"
echo "======================================================================"
echo ""
echo "Final sync to ensure validation results and checksums are on disk"
echo "before artifact packaging."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Generate build metadata
run: |
BUILD_INFO=wheelhouse/build-info.txt
echo "manylinux Build Information for ${{ matrix.target.name }}" > $BUILD_INFO
echo "========================================================" >> $BUILD_INFO
echo "" >> $BUILD_INFO
echo "Build Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> $BUILD_INFO
echo "Container: ${{ matrix.target.base_image }}" >> $BUILD_INFO
echo "Platform: $(uname -m)" >> $BUILD_INFO
echo "Build Method: GitHub Actions + manylinux Docker Container" >> $BUILD_INFO
echo "NVX Acceleration: ENABLED (binary wheels with native extensions)" >> $BUILD_INFO
echo "" >> $BUILD_INFO
echo "Per-Wheel ABI / Platform Tags:" >> $BUILD_INFO
echo "------------------------------" >> $BUILD_INFO
for whl in wheelhouse/*.whl; do
echo "- $(basename "$whl")" >> $BUILD_INFO
auditwheel show "$whl" | grep "platform tag" | awk -F'"' '{print " * " $2}' >> $BUILD_INFO
echo "" >> $BUILD_INFO
done
echo "" >> $BUILD_INFO
echo "Global System Information:" >> $BUILD_INFO
echo "--------------------------" >> $BUILD_INFO
echo "- OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d'\"' -f2 2>/dev/null || echo 'manylinux container')" >> $BUILD_INFO
echo "- Kernel: $(uname -r)" >> $BUILD_INFO
echo "- glibc: $(ldd --version 2>/dev/null | head -1 || echo 'N/A')" >> $BUILD_INFO
echo "- Architecture: $(uname -m)" >> $BUILD_INFO
echo "" >> $BUILD_INFO
echo "Build Tools:" >> $BUILD_INFO
echo "------------" >> $BUILD_INFO
echo "- Just: $(just --version)" >> $BUILD_INFO
echo "- uv: $(uv --version)" >> $BUILD_INFO
echo "- Rust: $(rustc --version)" >> $BUILD_INFO
echo "- Python: $(python3 --version)" >> $BUILD_INFO
echo "- GCC: $(gcc --version | head -1)" >> $BUILD_INFO
echo ""
echo "==> Generated build-info.txt:"
cat $BUILD_INFO
- name: List built artifacts
run: |
echo "==> Built artifacts for ${{ matrix.target.name }}:"
ls -la wheelhouse/ 2>/dev/null || echo "No wheelhouse/ directory found"
echo ""
echo "==> Build metadata:"
cat wheelhouse/build-info.txt 2>/dev/null || echo "No build info found"
echo ""
echo "==> Wheel inventory:"
find wheelhouse/ -name "*.whl" -exec basename {} \; 2>/dev/null | sort || echo "No wheels found"
- name:
Upload wheels, source dist and build metadata with cryptographic verification
uses: wamp-proto/wamp-cicd/actions/upload-artifact-verified@main
with:
name: artifacts-${{ matrix.target.name }}
path: wheelhouse/
retention-days: 30
# GitHub Releases, PyPI, and RTD publishing are now handled by the centralized 'release' workflow