Skip to content

Commit 3183adf

Browse files
committed
Upload to PyPI
1 parent ac0a3db commit 3183adf

File tree

2 files changed

+112
-9
lines changed

2 files changed

+112
-9
lines changed

.github/workflows/on_external_dispatch.yml

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: Builds triggered externally by DuckDB
1+
name: External Dispatch
22
on:
33
workflow_dispatch:
44
inputs:
@@ -12,7 +12,7 @@ on:
1212
required: false
1313
publish_packages:
1414
type: boolean
15-
description: Publish packages on S3 and PyPI?
15+
description: Publish to S3
1616
required: true
1717
default: false
1818

@@ -27,8 +27,8 @@ jobs:
2727
duckdb_git_ref: ${{ inputs.duckdb-sha }}
2828
force_version: ${{ inputs.force_version }}
2929

30-
upload_to_staging:
31-
name: Upload Artifacts to staging
30+
publish-s3:
31+
name: Publish Artifacts to the S3 Staging Bucket
3232
runs-on: ubuntu-latest
3333
needs: [ externally_triggered_build ]
3434
if: ${{ github.repository_owner == 'duckdb' && inputs.publish_packages }}
@@ -48,10 +48,10 @@ jobs:
4848
aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
4949

5050
- name: Upload artifacts to S3 bucket
51+
# semantics: if a version is forced then we upload into a folder by the version name, otherwise we upload
52+
# into a folder that is named <run id>-<run-attempt>. Only the latter will be discovered by
53+
# upload_to_pypi.yml.
5154
shell: bash
5255
run: |
53-
DUCKDB_SHA="${{ inputs.duckdb-sha }}"
54-
aws s3 cp \
55-
artifacts \
56-
s3://duckdb-staging/${DUCKDB_SHA:0:10}/${{ github.repository }}/ \
57-
--recursive
56+
FOLDER="${{ inputs.force_version != '' && inputs.force_version || format('{0}-{1}', github.run_id, github.run_attempt) }}"
57+
aws s3 cp artifacts s3://duckdb-staging/${{ github.repository }}/${FOLDER}/ --recursive
Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
name: upload_to_pypi.yml
2+
on:
3+
# this workflow runs after the below workflows are completed
4+
workflow_run:
5+
workflows: [ External Dispatch ]
6+
types: [ completed ]
7+
branches:
8+
- main
9+
- v*.*-*
10+
workflow_dispatch:
11+
inputs:
12+
environment:
13+
description: Environment to run in (test.pypi or production.pypi)
14+
type: choice
15+
required: true
16+
default: test.pypi
17+
options:
18+
- test.pypi
19+
- production.pypi
20+
artifact_folder:
21+
description: The S3 folder that contains the artifacts (s3://duckdb-staging/duckdb/duckdb-python/<artifact_folder>)
22+
type: string
23+
required: true
24+
25+
jobs:
26+
prepare:
27+
name: Prepare and guard upload
28+
if: ${{ github.repository_owner == 'duckdb' && ( github.event.workflow_run.conclusion == 'success' || github.event_name != 'workflow_run' ) }}
29+
runs-on: ubuntu-latest
30+
outputs:
31+
s3_prefix: ${{ steps.get_s3_prefix.outputs.s3_prefix }}
32+
steps:
33+
- name: Determine S3 Prefix
34+
id: get_s3_prefix
35+
run: |
36+
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
37+
artifact_folder="${{ inputs.artifact_folder }}"
38+
elif [[ -n "${{ github.event.workflow_run.id }}" && -n "${{ github.event.workflow_run.run_attempt }}" ]]; then
39+
artifact_folder="${{ github.event.workflow_run.id }}-${{ github.event.workflow_run.run_attempt }}"
40+
fi
41+
if [[ -n "${artifact_folder}" ]]; then
42+
s3_prefix="${{ github.repository }}/${artifact_folder}"
43+
echo "Created S3 prefix: ${s3_prefix}"
44+
echo "s3_prefix=${s3_prefix}" >> $GITHUB_OUTPUT
45+
else
46+
echo "Can't determine S3 prefix for event: ${{ github.event_name }}. Quitting."
47+
exit 1
48+
fi
49+
50+
- name: Authenticate With AWS
51+
uses: aws-actions/configure-aws-credentials@v4
52+
with:
53+
aws-region: 'us-east-2'
54+
aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}
55+
aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
56+
57+
- name: Check S3 Prefix
58+
shell: bash
59+
run: |
60+
if [[ $(aws s3api list-objects-v2 \
61+
--bucket duckdb-staging \
62+
--prefix "${{ steps.get_s3_prefix.outputs.s3_prefix }}/" \
63+
--max-items 1 \
64+
--query 'Contents[0].Key' \
65+
--output text) == "None" ]]; then
66+
echo "Prefix does not exist: ${{ steps.get_s3_prefix.outputs.s3_prefix }}"
67+
echo "${{ github.event_name == 'workflow_run' && 'Possibly built a stable release?' || 'Unexpected error' }}"
68+
exit 1
69+
fi
70+
71+
publish-pypi:
72+
name: Publish Artifacts to PyPI
73+
needs: [ prepare ]
74+
runs-on: ubuntu-latest
75+
environment:
76+
name: ${{ github.event_name == 'workflow_dispatch' && inputs.environment || 'test.pypi' }}
77+
if: ${{ vars.PYPI_URL != '' }}
78+
permissions:
79+
# this is needed for the OIDC flow that is used with trusted publishing on PyPI
80+
id-token: write
81+
steps:
82+
- name: Authenticate With AWS
83+
uses: aws-actions/configure-aws-credentials@v4
84+
with:
85+
aws-region: 'us-east-2'
86+
aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}
87+
aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
88+
89+
- name: Download Artifacts From S3
90+
env:
91+
S3_URL: 's3://duckdb-staging/${{ needs.prepare.outputs.s3_prefix }}/'
92+
AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
93+
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
94+
run: |
95+
mkdir packages
96+
aws s3 cp --recursive "${S3_URL}" packages
97+
98+
- name: Upload artifacts to PyPI
99+
if: ${{ vars.PYPI_URL != '' }}
100+
uses: pypa/gh-action-pypi-publish@release/v1
101+
with:
102+
repository-url: ${{ vars.PYPI_URL }}
103+
packages-dir: packages

0 commit comments

Comments
 (0)