name: External Dispatch

on:
  workflow_dispatch:
    inputs:
      duckdb-sha:
        type: string
        description: The DuckDB submodule commit to build against
        required: true
      commit-duckdb-sha:
        type: boolean
        description: Commit and push the DuckDB submodule ref
        default: false
      force-version:
        type: string
        description: Force version (vX.Y.Z-((rc|post)N))
        required: false
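        # For illustration (an interpretation of the pattern above, not a value from this file):
        # v1.2.3, v1.2.3-rc1, or v1.2.3-post2.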
      publish-packages:
        type: boolean
        description: Publish to S3
        required: true
        default: false
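
# A sketch of how this workflow might be dispatched from the command line; the branch and input
# values below are placeholders, not part of this file:
#   gh workflow run "External Dispatch" --ref <branch> \
#     -f duckdb-sha=<duckdb commit sha> \
#     -f commit-duckdb-sha=true \
#     -f publish-packages=false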

defaults:
  run:
    shell: bash

jobs:
  commit_duckdb_submodule_sha:
    name: Commit the submodule to the given DuckDB sha
    if: ${{ inputs.commit-duckdb-sha }}
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout DuckDB Python
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}
          fetch-depth: 0
          submodules: true
      - name: Checkout DuckDB
        run: |
          cd external/duckdb
          git fetch origin
          git checkout ${{ inputs.duckdb-sha }}
      - name: Commit and push new submodule ref
        # see https://github.com/actions/checkout?tab=readme-ov-file#push-a-commit-to-a-pr-using-the-built-in-token
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add external/duckdb
          if git diff --cached --quiet; then
            echo "No changes to commit: submodule ref is unchanged."
            exit 0
          fi
          git commit -m "Update submodule ref"
          git push
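
  # Note: this job declares no `needs:` on commit_duckdb_submodule_sha and is passed
  # inputs.duckdb-sha directly, so the two jobs can run in parallel when both are enabled.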
  externally_triggered_build:
    name: Build and test releases
    uses: ./.github/workflows/packaging.yml
    with:
      minimal: false
      testsuite: all
      git-ref: ${{ github.ref }}
      duckdb-git-ref: ${{ inputs.duckdb-sha }}
      force-version: ${{ inputs.force-version }}

  publish_s3:
    name: Publish Artifacts to the S3 Staging Bucket
    runs-on: ubuntu-latest
    needs: [ externally_triggered_build ]
    if: ${{ github.repository_owner == 'duckdb' && inputs.publish-packages }}
    steps:
      - name: Fetch artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: '{sdist,wheel}*'
          path: artifacts/
          merge-multiple: true
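        # The pattern above matches artifact names starting with "sdist" or "wheel" (presumably
        # the names produced by packaging.yml); merge-multiple downloads them all into the single
        # artifacts/ directory instead of one subdirectory per artifact.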
      - name: Authenticate with AWS
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: 'us-east-2'
          aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
      - name: Upload artifacts to S3 bucket
        # Semantics: if a version is forced, we upload into a folder named after that version;
        # otherwise we upload into a folder named <run id>-<run attempt>. Only the latter will be
        # discovered by upload_to_pypi.yml.
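        # For illustration (values assumed): a run of duckdb/duckdb-python with run id 12345678,
        # attempt 1, and no forced version uploads to s3://duckdb-staging/duckdb/duckdb-python/12345678-1/,
        # while force-version "v1.2.3-rc1" uploads to s3://duckdb-staging/duckdb/duckdb-python/v1.2.3-rc1/.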
        run: |
          FOLDER="${{ inputs.force-version != '' && inputs.force-version || format('{0}-{1}', github.run_id, github.run_attempt) }}"
          aws s3 cp artifacts s3://duckdb-staging/${{ github.repository }}/${FOLDER}/ --recursive