# GitHub Actions workflow: release (captured from workflow run "release #337").
# NOTE: the original paste included GitHub web-UI chrome ("Skip to content",
# "Workflow file for this run") which was not part of the workflow file.
name: release

on:
  # Auto-trigger after any required workflow completes
  # Release job checks if ALL required workflows are done before proceeding
  workflow_run:
    workflows: ["wheels", "wheels-docker", "wheels-arm64", "main"]
    types: [completed]
  # Manual dispatch for debugging
  workflow_dispatch:
jobs:
  # Gate job: determines whether all required build workflows for this commit
  # have completed successfully, and resolves run IDs / artifact names for the
  # downstream release jobs.
  check-workflows:
    name: Check if all workflows completed
    runs-on: ubuntu-latest
    outputs:
      all_complete: ${{ steps.check.outputs.all_complete }}
      wheels_run_id: ${{ steps.check.outputs.wheels_run_id }}
      wheels_docker_run_id: ${{ steps.check.outputs.wheels_docker_run_id }}
      wheels_arm64_run_id: ${{ steps.check.outputs.wheels_arm64_run_id }}
      main_run_id: ${{ steps.check.outputs.main_run_id }}
      is_tag: ${{ steps.check.outputs.is_tag }}
      tag_name: ${{ steps.check.outputs.tag_name }}
      # Artifact names with hash suffixes
      # NOTE: Linux x86_64 wheels come from wheels-docker.yml (artifact_manylinux_x86_64)
      # wheels.yml only produces macOS wheels, Windows wheels, and source distribution
      artifact_macos_wheels: ${{ steps.check.outputs.artifact_macos_wheels }}
      artifact_windows_wheels: ${{ steps.check.outputs.artifact_windows_wheels }}
      artifact_source_dist: ${{ steps.check.outputs.artifact_source_dist }}
      artifact_manylinux_x86_64: ${{ steps.check.outputs.artifact_manylinux_x86_64 }}
      artifact_arm64_cp311: ${{ steps.check.outputs.artifact_arm64_cp311 }}
      artifact_arm64_cp312: ${{ steps.check.outputs.artifact_arm64_cp312 }}
      artifact_arm64_cp313: ${{ steps.check.outputs.artifact_arm64_cp313 }}
      artifact_arm64_cp314: ${{ steps.check.outputs.artifact_arm64_cp314 }}
      artifact_arm64_pypy311: ${{ steps.check.outputs.artifact_arm64_pypy311 }}
    steps:
      - name: Check all required workflows completed
        id: check
        uses: actions/github-script@v7
        with:
          script: |
            const requiredWorkflows = ['wheels', 'wheels-docker', 'wheels-arm64', 'main'];

            // Handle both workflow_run and workflow_dispatch triggers
            const commitSha = context.payload.workflow_run?.head_sha || context.sha;
            const triggeredBy = context.payload.workflow_run?.name || 'manual (workflow_dispatch)';

            console.log('─────────────────────────────────────────────────');
            console.log('🔍 Checking workflow completion status');
            console.log('─────────────────────────────────────────────────');
            console.log(`Event: ${context.eventName}`);
            console.log(`Commit SHA: ${commitSha}`);
            console.log(`Triggered by: ${triggeredBy}`);
            console.log('');

            // Get all workflow runs for this commit
            const { data: runs } = await github.rest.actions.listWorkflowRunsForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              head_sha: commitSha,
              per_page: 100
            });

            // Group by workflow name and find latest run for each
            const latestRuns = {};
            for (const run of runs.workflow_runs) {
              const workflowName = run.name;
              if (requiredWorkflows.includes(workflowName)) {
                if (!latestRuns[workflowName] || run.id > latestRuns[workflowName].id) {
                  latestRuns[workflowName] = run;
                }
              }
            }

            // Check if all required workflows completed successfully
            console.log('Required workflows status:');
            const allComplete = requiredWorkflows.every(name => {
              const run = latestRuns[name];
              const complete = run && run.status === 'completed' && run.conclusion === 'success';
              const status = run ? `${run.status}/${run.conclusion}` : 'not found';
              console.log(` ${complete ? '✅' : '⏳'} ${name.padEnd(20)} : ${status}`);
              return complete;
            });
            console.log('');

            if (!allComplete) {
              console.log('⏳ Not all workflows complete yet - exiting early');
              console.log(' This is normal! Release will proceed once all workflows finish.');
            } else {
              console.log('✅ All workflows complete - proceeding with release!');
            }
            console.log('─────────────────────────────────────────────────');

            core.setOutput('all_complete', allComplete ? 'true' : 'false');

            // Output run IDs for artifact downloads
            core.setOutput('wheels_run_id', latestRuns['wheels']?.id || '');
            core.setOutput('wheels_docker_run_id', latestRuns['wheels-docker']?.id || '');
            core.setOutput('wheels_arm64_run_id', latestRuns['wheels-arm64']?.id || '');
            core.setOutput('main_run_id', latestRuns['main']?.id || '');

            // Query artifact names with meta-checksum suffixes
            if (allComplete) {
              console.log('');
              console.log('─────────────────────────────────────────────────');
              console.log('🔍 Querying unique artifact names');
              console.log('─────────────────────────────────────────────────');

              // Helper function to find artifact by prefix
              async function findArtifact(runId, prefix) {
                if (!runId) return '';
                try {
                  const { data: artifacts } = await github.rest.actions.listWorkflowRunArtifacts({
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    run_id: runId
                  });
                  const artifact = artifacts.artifacts.find(a => a.name.startsWith(prefix + '-'));
                  if (artifact) {
                    console.log(` ✅ ${prefix.padEnd(50)} → ${artifact.name}`);
                    return artifact.name;
                  } else {
                    console.log(` ⚠️ ${prefix.padEnd(50)} → NOT FOUND`);
                    return '';
                  }
                } catch (error) {
                  console.log(` ❌ ${prefix.padEnd(50)} → ERROR: ${error.message}`);
                  return '';
                }
              }

              // Query artifacts from wheels workflow
              // NOTE: Linux x86_64 wheels are built in wheels-docker.yml (manylinux container)
              // wheels.yml only produces: macOS wheels, Windows wheels, and source distribution
              const wheelsRunId = latestRuns['wheels']?.id;
              core.setOutput('artifact_macos_wheels', await findArtifact(wheelsRunId, 'wheels-macos-arm64'));
              core.setOutput('artifact_windows_wheels', await findArtifact(wheelsRunId, 'wheels-windows-x86_64'));
              core.setOutput('artifact_source_dist', await findArtifact(wheelsRunId, 'source-distribution'));

              // Query artifacts from wheels-docker workflow (Linux x86_64 manylinux wheels)
              const wheelsDockerRunId = latestRuns['wheels-docker']?.id;
              core.setOutput('artifact_manylinux_x86_64', await findArtifact(wheelsDockerRunId, 'artifacts-docker-manylinux_2_28_x86_64'));

              // Query artifacts from wheels-arm64 workflow
              const wheelsArm64RunId = latestRuns['wheels-arm64']?.id;
              core.setOutput('artifact_arm64_cp311', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.11-manylinux_2_28_aarch64'));
              core.setOutput('artifact_arm64_cp312', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.12-manylinux_2_28_aarch64'));
              core.setOutput('artifact_arm64_cp313', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.13-manylinux_2_28_aarch64'));
              core.setOutput('artifact_arm64_cp314', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.14-manylinux_2_28_aarch64'));
              core.setOutput('artifact_arm64_pypy311', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-pypy-3.11-manylinux_2_36_aarch64'));
              console.log('─────────────────────────────────────────────────');
            }

            // Check if this is a tag push (handles both "refs/tags/vX.Y.Z" and "vX.Y.Z" formats)
            // NOTE(review): for workflow_run events, head_branch carries the tag name
            // (bare "vX.Y.Z"); for workflow_dispatch, context.ref carries "refs/tags/...".
            const ref = context.payload.workflow_run?.head_branch || context.ref;
            const isTag = ref && (ref.startsWith('refs/tags/v') || ref.match(/^v\d+\.\d+\.\d+/));
            const tagName = isTag ? ref.replace('refs/tags/', '') : '';
            console.log('');
            console.log(`Is tag: ${isTag}`);
            console.log(`Tag name: ${tagName}`);
            core.setOutput('is_tag', isTag ? 'true' : 'false');
            core.setOutput('tag_name', tagName);
identifiers:
needs: check-workflows
if: needs.check-workflows.outputs.all_complete == 'true'
# GitHub needs to know where .cicd/workflows/identifiers.yml lives at parse time,
# and submodules aren't included in that context! thus the following does NOT work:
# uses: ./.cicd/workflows/identifiers.yml
# we MUST reference the remote repo directly:
uses: wamp-proto/wamp-cicd/.github/workflows/identifiers.yml@main
# IMPORTANT: we still need .cicd as a Git submodule in the using repo though!
# because e.g. identifiers.yml wants to access scripts/sanitize.sh !
# Development GitHub releases (for master branch builds)
release-development:
name: Development GitHub Release
needs: [check-workflows, identifiers]
runs-on: ubuntu-latest
# Only create releases for development builds (explicit positive list)
if: |
needs.check-workflows.outputs.all_complete == 'true' &&
(github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) &&
(needs.identifiers.outputs.release_type == 'development' || needs.identifiers.outputs.release_type == 'nightly')
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
# NOTE: Linux x86_64 wheels come from wheels-docker.yml (manylinux container)
# See "Download manylinux x86_64 wheels" step below
- name: Download and verify macOS wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_macos_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify Windows wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_windows_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify source distribution
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_source_dist }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify manylinux x86_64 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_manylinux_x86_64 }}
path: ${{ github.workspace }}/dist-manylinux-x86_64/
run-id: ${{ needs.check-workflows.outputs.wheels_docker_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.11 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp311 }}
path: ${{ github.workspace }}/dist-arm64-cp311/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.12 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp312 }}
path: ${{ github.workspace }}/dist-arm64-cp312/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.13 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp313 }}
path: ${{ github.workspace }}/dist-arm64-cp313/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.14 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp314 }}
path: ${{ github.workspace }}/dist-arm64-cp314/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 PyPy 3.11 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_pypy311 }}
path: ${{ github.workspace }}/dist-arm64-pypy311/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Flush file system buffers
run: |
echo "======================================================================"
echo "Flushing all file system buffers after artifact downloads."
echo "Ensures all downloaded files are on disk before checksum verification."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Re-verify wheel checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Wheel Checksum Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying wheel integrity after artifact download."
echo "Detecting corruption during GitHub Actions artifact transfer."
echo ""
HAS_ERRORS=0
WHEELS_VERIFIED=0
# Function to verify checksums in a directory
verify_directory() {
local DIR=$1
local DESC=$2
if [ ! -d "$DIR" ]; then
echo "⚠️ Directory not found: $DIR"
return
fi
if [ ! -f "$DIR/CHECKSUMS.sha256" ]; then
echo "⚠️ No CHECKSUMS.sha256 found in $DIR"
return
fi
echo "==> Re-verifying $DESC..."
cd "$DIR"
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd - > /dev/null
echo ""
}
# Verify all downloaded artifacts (use absolute paths for Docker-built artifacts)
verify_directory "${{ github.workspace }}/dist-manylinux-x86_64" "manylinux x86_64 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp311" "ARM64 CPython 3.11 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp312" "ARM64 CPython 3.12 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp313" "ARM64 CPython 3.13 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp314" "ARM64 CPython 3.14 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-pypy311" "ARM64 PyPy 3.11 wheels"
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed checksum verification."
echo "This indicates CORRUPTION during GitHub Actions artifact transfer:"
echo " 1. Build workflow created valid wheel + checksum"
echo " 2. GitHub Actions corrupted wheel during upload/storage/download"
echo " 3. Downloaded wheel checksum doesn't match original"
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
elif [ $WHEELS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED - NO WHEELS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero wheels were verified. This means:"
echo " 1. No CHECKSUMS.sha256 files were found, OR"
echo " 2. All wheels referenced in checksums were missing"
echo ""
echo "This is a critical failure - we cannot confirm wheel integrity."
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All wheel checksums verified successfully ($WHEELS_VERIFIED wheels)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: build workflows → release workflow"
echo "No corruption detected during artifact transfer."
fi
- name: Consolidate all artifacts
run: |
echo "======================================================================"
echo "==> Consolidating all artifacts to dist/"
echo "======================================================================"
mkdir -p ${{ github.workspace }}/dist/
# Copy wheels from all directories (use absolute paths for Docker-built artifacts)
for dir in ${{ github.workspace }}/dist-manylinux-x86_64 ${{ github.workspace }}/dist-arm64-*; do
if [ -d "$dir" ]; then
echo "Copying wheels from $dir/"
cp -v "$dir"/*.whl ${{ github.workspace }}/dist/ 2>/dev/null || echo "No wheels in $dir/"
fi
done
echo ""
- name: Validate and clean release fileset for GitHub
uses: wamp-proto/wamp-cicd/actions/check-release-fileset@main
with:
distdir: dist
mode: strict
keep-metadata: true # Keep CHECKSUMS for user verification
targets: |
cpy311-linux-x86_64-manylinux_2_28
cpy312-linux-x86_64-manylinux_2_28
cpy313-linux-x86_64-manylinux_2_28
cpy314-linux-x86_64-manylinux_2_28
pypy311-linux-x86_64-manylinux_2_28
cpy311-linux-aarch64-manylinux_2_28
cpy312-linux-aarch64-manylinux_2_28
cpy313-linux-aarch64-manylinux_2_28
cpy314-linux-aarch64-manylinux_2_28
pypy311-linux-aarch64-manylinux_2_34
cpy311-macos-arm64
cpy312-macos-arm64
cpy313-macos-arm64
cpy314-macos-arm64
pypy311-macos-arm64
cpy311-win-amd64
cpy312-win-amd64
cpy313-win-amd64
cpy314-win-amd64
source
- name: List all downloaded artifacts
run: |
echo "======================================================================"
echo "Downloaded artifacts:"
echo "======================================================================"
ls -lh dist/
echo ""
echo "Wheel count: $(ls dist/*.whl 2>/dev/null | wc -l)"
echo "Source dist count: $(ls dist/*.tar.gz 2>/dev/null | wc -l)"
echo "======================================================================"
- name: Generate consolidated checksums
run: |
cd dist
echo "Generating consolidated SHA256 checksums..."
sha256sum *.whl *.tar.gz > CHECKSUMS-ALL.sha256
echo ""
echo "======================================================================"
echo "Consolidated checksums:"
echo "======================================================================"
cat CHECKSUMS-ALL.sha256
cd ..
- name: Create Development Release
uses: softprops/action-gh-release@v2
with:
tag_name: dev-${{ github.sha }}
name: Development Build ${{ github.sha }}
body: |
## Development Build
Automated development build from commit ${{ github.sha }}
**⚠️ This is a development build and should not be used in production!**
Built wheels for:
- Linux x86_64 (native + manylinux_2_34)
- Linux ARM64 (manylinux_2_28 + manylinux_2_36 for PyPy)
- macOS ARM64
- Windows x86_64
Includes source distribution (.tar.gz)
See CHECKSUMS-ALL.sha256 for file integrity verification.
files: dist/*
draft: false
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Production releases (for version tags)
release-production:
name: Production Release (PyPI)
needs: [check-workflows, identifiers]
runs-on: ubuntu-latest
# Only publish to PyPI for stable releases (version tags)
if: |
needs.check-workflows.outputs.all_complete == 'true' &&
(github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) &&
needs.identifiers.outputs.release_type == 'stable'
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
environment: pypi
permissions:
id-token: write # Required for PyPI trusted publishing (OIDC)
contents: write # Required for creating GitHub releases
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
# NOTE: Linux x86_64 wheels come from wheels-docker.yml (manylinux container)
# See "Download manylinux x86_64 wheels" step below
- name: Download and verify macOS wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_macos_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify Windows wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_windows_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify source distribution
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_source_dist }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify manylinux x86_64 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_manylinux_x86_64 }}
path: ${{ github.workspace }}/dist-manylinux-x86_64/
run-id: ${{ needs.check-workflows.outputs.wheels_docker_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.11 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp311 }}
path: ${{ github.workspace }}/dist-arm64-cp311/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.12 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp312 }}
path: ${{ github.workspace }}/dist-arm64-cp312/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.13 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp313 }}
path: ${{ github.workspace }}/dist-arm64-cp313/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 CPython 3.14 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_cp314 }}
path: ${{ github.workspace }}/dist-arm64-cp314/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Download and verify ARM64 PyPy 3.11 wheels
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-workflows.outputs.artifact_arm64_pypy311 }}
path: ${{ github.workspace }}/dist-arm64-pypy311/
run-id: ${{ needs.check-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
- name: Flush file system buffers
run: |
echo "======================================================================"
echo "Flushing all file system buffers after artifact downloads."
echo "Ensures all downloaded files are on disk before checksum verification."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Re-verify wheel checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Wheel Checksum Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying wheel integrity after artifact download."
echo "Detecting corruption during GitHub Actions artifact transfer."
echo ""
HAS_ERRORS=0
WHEELS_VERIFIED=0
# Function to verify checksums in a directory
verify_directory() {
local DIR=$1
local DESC=$2
if [ ! -d "$DIR" ]; then
echo "⚠️ Directory not found: $DIR"
return
fi
if [ ! -f "$DIR/CHECKSUMS.sha256" ]; then
echo "⚠️ No CHECKSUMS.sha256 found in $DIR"
return
fi
echo "==> Re-verifying $DESC..."
cd "$DIR"
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd - > /dev/null
echo ""
}
# Verify all downloaded artifacts (use absolute paths for Docker-built artifacts)
verify_directory "${{ github.workspace }}/dist-manylinux-x86_64" "manylinux x86_64 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp311" "ARM64 CPython 3.11 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp312" "ARM64 CPython 3.12 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp313" "ARM64 CPython 3.13 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-cp314" "ARM64 CPython 3.14 wheels"
verify_directory "${{ github.workspace }}/dist-arm64-pypy311" "ARM64 PyPy 3.11 wheels"
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed checksum verification."
echo "This indicates CORRUPTION during GitHub Actions artifact transfer:"
echo " 1. Build workflow created valid wheel + checksum"
echo " 2. GitHub Actions corrupted wheel during upload/storage/download"
echo " 3. Downloaded wheel checksum doesn't match original"
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
elif [ $WHEELS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED - NO WHEELS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero wheels were verified. This means:"
echo " 1. No CHECKSUMS.sha256 files were found, OR"
echo " 2. All wheels referenced in checksums were missing"
echo ""
echo "This is a critical failure - we cannot confirm wheel integrity."
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All wheel checksums verified successfully ($WHEELS_VERIFIED wheels)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: build workflows → release workflow"
echo "No corruption detected during artifact transfer."
fi
- name: Consolidate all artifacts
run: |
echo "======================================================================"
echo "==> Consolidating all artifacts to dist/"
echo "======================================================================"
mkdir -p ${{ github.workspace }}/dist/
# Copy wheels from all directories (use absolute paths for Docker-built artifacts)
for dir in ${{ github.workspace }}/dist-manylinux-x86_64 ${{ github.workspace }}/dist-arm64-*; do
if [ -d "$dir" ]; then
echo "Copying wheels from $dir/"
cp -v "$dir"/*.whl ${{ github.workspace }}/dist/ 2>/dev/null || echo "No wheels in $dir/"
fi
done
echo ""
- name: List all downloaded artifacts
run: |
echo "======================================================================"
echo "Downloaded artifacts for release ${{ needs.check-workflows.outputs.tag_name }}:"
echo "======================================================================"
ls -lh dist/
echo ""
echo "Wheel count: $(ls dist/*.whl 2>/dev/null | wc -l)"
echo "Source dist count: $(ls dist/*.tar.gz 2>/dev/null | wc -l)"
echo "======================================================================"
- name: Generate consolidated checksums
run: |
cd dist
echo "Generating consolidated SHA256 checksums..."
sha256sum *.whl *.tar.gz > CHECKSUMS-ALL.sha256
echo ""
echo "======================================================================"
echo "Consolidated checksums:"
echo "======================================================================"
cat CHECKSUMS-ALL.sha256
cd ..
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ needs.check-workflows.outputs.tag_name }}
name: Release ${{ needs.check-workflows.outputs.tag_name }}
body: |
## zlmdb ${{ needs.check-workflows.outputs.tag_name }}
Official release build
Built wheels for:
- Linux x86_64 (native + manylinux_2_34)
- Linux ARM64 (manylinux_2_28 + manylinux_2_36 for PyPy)
- macOS ARM64
- Windows x86_64
Includes source distribution (.tar.gz)
See CHECKSUMS-ALL.sha256 for file integrity verification.
Also published to PyPI: https://pypi.org/project/zlmdb/
files: dist/*
draft: false
prerelease: false
discussion_category_name: ci-cd
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Validate and clean release fileset for PyPI
uses: wamp-proto/wamp-cicd/actions/check-release-fileset@main
with:
distdir: dist
mode: strict
# keep-metadata: false (default - removes CHECKSUMS, build-info.txt etc for PyPI)
targets: |
cpy311-linux-x86_64-manylinux_2_28
cpy312-linux-x86_64-manylinux_2_28
cpy313-linux-x86_64-manylinux_2_28
cpy314-linux-x86_64-manylinux_2_28
pypy311-linux-x86_64-manylinux_2_28
cpy311-linux-aarch64-manylinux_2_28
cpy312-linux-aarch64-manylinux_2_28
cpy313-linux-aarch64-manylinux_2_28
cpy314-linux-aarch64-manylinux_2_28
pypy311-linux-aarch64-manylinux_2_34
cpy311-macos-arm64
cpy312-macos-arm64
cpy313-macos-arm64
cpy314-macos-arm64
pypy311-macos-arm64
cpy311-win-amd64
cpy312-win-amd64
cpy313-win-amd64
cpy314-win-amd64
source
- name: Check if version already exists on PyPI
id: pypi_check
run: |
# Extract version from tag name (v25.12.2 -> 25.12.2)
VERSION="${{ needs.check-workflows.outputs.tag_name }}"
VERSION="${VERSION#v}"
echo "Checking if zlmdb version ${VERSION} exists on PyPI..."
# Query PyPI JSON API
HTTP_CODE=$(curl -s -o /tmp/pypi_response.json -w "%{http_code}" "https://pypi.org/pypi/zlmdb/${VERSION}/json")
if [ "${HTTP_CODE}" = "200" ]; then
echo "⚠️ WARNING: Version ${VERSION} already exists on PyPI!"
echo "⚠️ PyPI does not allow re-uploading the same version."
echo "⚠️ Skipping PyPI upload to avoid error."
echo "exists=true" >> $GITHUB_OUTPUT
elif [ "${HTTP_CODE}" = "404" ]; then
echo "✅ Version ${VERSION} does not exist on PyPI yet - proceeding with upload"
echo "exists=false" >> $GITHUB_OUTPUT
else
echo "⚠️ Unexpected HTTP code ${HTTP_CODE} from PyPI API"
echo "⚠️ Response:"
cat /tmp/pypi_response.json || echo "(no response)"
echo "⚠️ Proceeding with upload anyway (will fail if version exists)"
echo "exists=false" >> $GITHUB_OUTPUT
fi
rm -f /tmp/pypi_response.json
- name: Final validation before PyPI upload
if: steps.pypi_check.outputs.exists == 'false'
run: |
set -o pipefail
echo "======================================================================"
echo "==> FINAL PYPI VALIDATION: All Packages"
echo "======================================================================"
echo ""
echo "Last chance to catch corrupted packages before PyPI upload."
echo ""
# Install both packaging and twine from master for PEP 639 (Core Metadata 2.4) support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
echo ""
echo "==> Validation environment:"
echo "Python: $(python3 --version)"
echo "setuptools: $(python3 -m pip show setuptools | grep '^Version:' || echo 'not installed')"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:' || echo 'not installed')"
echo "twine: $(twine --version)"
echo ""
HAS_ERRORS=0
for pkg in dist/*.whl dist/*.tar.gz; do
if [ ! -f "$pkg" ]; then
continue
fi
PKG_NAME=$(basename "$pkg")
echo "==> Validating: $PKG_NAME"
# For wheels: full integrity check
if [[ "$pkg" == *.whl ]]; then
if ! unzip -t "$pkg" > /dev/null 2>&1; then
echo " ❌ ZIP test FAIL - CORRUPTED WHEEL!"
HAS_ERRORS=1
elif ! python3 -m zipfile -t "$pkg" > /dev/null 2>&1; then
echo " ❌ Python zipfile test FAIL - CORRUPTED WHEEL!"
HAS_ERRORS=1
else
# Run twine check and capture output
twine check "$pkg" 2>&1 | tee /tmp/twine_pypi_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_pypi_output.txt; then
echo " ✅ All checks PASS"
else
echo " ❌ Twine check FAIL"
cat /tmp/twine_pypi_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_pypi_output.txt
fi
# For source dists: gzip + tar integrity
elif [[ "$pkg" == *.tar.gz ]]; then
if ! gzip -t "$pkg" 2>/dev/null; then
echo " ❌ Gzip test FAIL - CORRUPTED TARBALL!"
HAS_ERRORS=1
elif ! tar -tzf "$pkg" > /dev/null 2>&1; then
echo " ❌ Tar test FAIL - CORRUPTED TARBALL!"
HAS_ERRORS=1
else
# Run twine check and capture output
twine check "$pkg" 2>&1 | tee /tmp/twine_pypi_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_pypi_output.txt; then
echo " ✅ All checks PASS"
else
echo " ❌ Twine check FAIL"
cat /tmp/twine_pypi_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_pypi_output.txt
fi
fi
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ PYPI VALIDATION FAILED - UPLOAD BLOCKED"
echo "======================================================================"
echo ""
echo "Corrupted packages detected. PyPI upload BLOCKED."
echo ""
exit 1
else
echo "======================================================================"
echo "✅ ALL PACKAGES VALIDATED - Safe to upload to PyPI"
echo "======================================================================"
fi
- name: Publish to PyPI using bleeding-edge twine
if: steps.pypi_check.outputs.exists == 'false'
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
run: |
echo "==> Publishing to PyPI using twine from master..."
# Install bleeding-edge packaging and twine for PEP 639 support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
echo "Upload environment:"
echo "twine: $(twine --version)"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:')"
echo ""
# Upload to PyPI - explicit patterns to avoid uploading metadata files
twine upload dist/*.whl dist/*.tar.gz --verbose