name: Upload Artifacts to PyPI
on:
-  # this workflow runs after the "External Dispatch" workflow is completed
-  workflow_run:
-    workflows: [ External Dispatch ]
-    types: [ completed ]
-    branches:
-      - main
-      - v*.*-*
+  workflow_call:
+    inputs:
+      environment:
+        description: CI environment to run in (test.pypi or production.pypi)
+        type: string
+        required: true
+      version:
+        description: The version to upload (must be present in the S3 staging bucket)
+        type: string
+        required: true
  workflow_dispatch:
    inputs:
      environment:
-        description: Environment to run in ()
+        description: CI environment to run in (test.pypi or production.pypi)
        type: choice
        required: true
        default: test.pypi
        options:
          - test.pypi
          - production.pypi
-      artifact-folder:
-        description: The S3 folder that contains the artifacts (s3://duckdb-staging/duckdb/duckdb-python/<artifact-folder>)
+      version:
+        description: The version to upload (must be present in the S3 staging bucket)
        type: string
        required: true

-concurrency: ${{ inputs.artifact-folder || format('{0}-{1}', github.event.workflow_run.id, github.event.workflow_run.run_attempt) }}
+concurrency:
+  group: ${{ inputs.version }}
+  cancel-in-progress: true

jobs:
-  prepare:
-    name: Prepare and guard upload
-    if: ${{ github.repository_owner == 'duckdb' && ( github.event.workflow_run.conclusion == 'success' || github.event_name != 'workflow_run' ) }}
-    runs-on: ubuntu-latest
-    outputs:
-      s3_prefix: ${{ steps.get_s3_prefix.outputs.s3_prefix }}
-    steps:
-      - name: Determine S3 Prefix
-        id: get_s3_prefix
-        run: |
-          artifact_folder="${{ inputs.artifact-folder || format('{0}-{1}', github.event.workflow_run.id, github.event.workflow_run.run_attempt) }}"
-          if [[ -n "${artifact_folder}" ]]; then
-            s3_prefix="${{ github.repository }}/${artifact_folder}"
-            echo "Created S3 prefix: ${s3_prefix}"
-            echo "s3_prefix=${s3_prefix}" >> $GITHUB_OUTPUT
-          else
-            echo "Can't determine S3 prefix for event: ${{ github.event_name }}. Quitting."
-            exit 1
-          fi
-
-      - name: Authenticate With AWS
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-region: 'us-east-2'
-          aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}
-          aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
-
-      - name: Check S3 Prefix
-        shell: bash
-        run: |
-          if [[ $(aws s3api list-objects-v2 \
-            --bucket duckdb-staging \
-            --prefix "${{ steps.get_s3_prefix.outputs.s3_prefix }}/" \
-            --max-items 1 \
-            --query 'Contents[0].Key' \
-            --output text) == "None" ]]; then
-            echo "Prefix does not exist: ${{ steps.get_s3_prefix.outputs.s3_prefix }}"
-            echo "${{ github.event_name == 'workflow_run' && 'Possibly built a stable release?' || 'Unexpected error' }}"
-            exit 1
-          fi
-
  publish-pypi:
    name: Publish Artifacts to PyPI
-    needs: [ prepare ]
    runs-on: ubuntu-latest
    environment:
-      name: ${{ github.event_name == 'workflow_dispatch' && inputs.environment || 'test.pypi' }}
+      name: ${{ inputs.environment }}
    permissions:
      # this is needed for the OIDC flow that is used with trusted publishing on PyPI
      id-token: write
    steps:
-      - name: Fail if PYPI_HOST is not set
-        if: ${{ vars.PYPI_HOST == '' }}
-        shell: bash
+      - if: ${{ vars.PYPI_HOST == '' }}
        run: |
-          env_name="${{ github.event_name == 'workflow_dispatch' && inputs.environment || 'test.pypi' }}"
-          echo "Error: vars.PYPI_HOST is not set in the resolved environment (${env_name})"
+          echo "Error: PYPI_HOST is not set in CI environment '${{ inputs.environment }}'"
          exit 1

      - name: Authenticate With AWS
…

      - name: Download Artifacts From S3
        env:
-          S3_URL: 's3://duckdb-staging/${{ needs.prepare.outputs.s3_prefix }}/'
+          S3_URL: 's3://duckdb-staging/${{ github.repository }}/${{ inputs.version }}/'
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
…
    name: Remove Nightlies from PyPI
    needs: publish-pypi
    uses: ./.github/workflows/cleanup_pypi.yml
+    with:
+      environment: ${{ inputs.environment }}
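
With the trigger switched from workflow_run to workflow_call, another workflow can now invoke this upload directly. Below is a minimal caller sketch; the file name upload_pypi.yml, the caller's own trigger, and the secrets: inherit wiring are assumptions, since neither the file name of this workflow nor the caller appears in the diff:

name: Example caller
on:
  workflow_dispatch:
    inputs:
      version:
        description: Version already staged in S3
        type: string
        required: true

jobs:
  upload:
    # Path and file name are assumptions; point this at the actual workflow file above.
    uses: ./.github/workflows/upload_pypi.yml
    with:
      environment: test.pypi
      version: ${{ inputs.version }}
    # The called workflow reads secrets.S3_DUCKDB_STAGING_ID/KEY, so the caller must forward its secrets.
    secrets: inherit

Manual runs keep working through workflow_dispatch, where environment defaults to test.pypi; either way, the concurrency group keyed on inputs.version means two uploads of the same version cancel each other rather than race.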