---
name: upload_to_pypi.yml
on:
  # This workflow runs after the below workflows are completed.
  workflow_run:
    workflows: [ External Dispatch ]
    types: [ completed ]
    branches:
      - main
      # Quoted: the glob contains '*', keep it an explicit string scalar.
      - 'v*.*-*'
  workflow_dispatch:
    inputs:
      environment:
        # Fixed: description previously ended in empty parens "()".
        description: Environment to run in (test.pypi or production.pypi)
        type: choice
        required: true
        default: test.pypi
        options:
          - test.pypi
          - production.pypi
      artifact_folder:
        description: The S3 folder that contains the artifacts (s3://duckdb-staging/duckdb/duckdb-python/<artifact_folder>)
        type: string
        required: true
 | 25 | +jobs:  | 
 | 26 | +  prepare:  | 
 | 27 | +    name: Prepare and guard upload  | 
 | 28 | +    if: ${{ github.repository_owner == 'duckdb' && ( github.event.workflow_run.conclusion == 'success' || github.event_name != 'workflow_run' ) }}  | 
 | 29 | +    runs-on: ubuntu-latest  | 
 | 30 | +    outputs:  | 
 | 31 | +      s3_prefix: ${{ steps.get_s3_prefix.outputs.s3_prefix }}  | 
 | 32 | +    steps:  | 
 | 33 | +      - name: Determine S3 Prefix  | 
 | 34 | +        id: get_s3_prefix  | 
 | 35 | +        run: |  | 
 | 36 | +          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then  | 
 | 37 | +            artifact_folder="${{ inputs.artifact_folder }}"  | 
 | 38 | +          elif [[ -n "${{ github.event.workflow_run.id }}" && -n "${{ github.event.workflow_run.run_attempt }}" ]]; then  | 
 | 39 | +            artifact_folder="${{ github.event.workflow_run.id }}-${{ github.event.workflow_run.run_attempt }}"  | 
 | 40 | +          fi  | 
 | 41 | +          if [[ -n "${artifact_folder}" ]]; then  | 
 | 42 | +            s3_prefix="${{ github.repository }}/${artifact_folder}"  | 
 | 43 | +            echo "Created S3 prefix: ${s3_prefix}"  | 
 | 44 | +            echo "s3_prefix=${s3_prefix}" >> $GITHUB_OUTPUT  | 
 | 45 | +          else  | 
 | 46 | +            echo "Can't determine S3 prefix for event: ${{ github.event_name }}. Quitting."  | 
 | 47 | +            exit 1  | 
 | 48 | +          fi  | 
 | 49 | +
  | 
 | 50 | +      - name: Authenticate With AWS  | 
 | 51 | +        uses: aws-actions/configure-aws-credentials@v4  | 
 | 52 | +        with:  | 
 | 53 | +          aws-region: 'us-east-2'  | 
 | 54 | +          aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}  | 
 | 55 | +          aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}  | 
 | 56 | + | 
 | 57 | +      - name: Check S3 Prefix  | 
 | 58 | +        shell: bash  | 
 | 59 | +        run: |  | 
 | 60 | +          if [[ $(aws s3api list-objects-v2 \  | 
 | 61 | +            --bucket duckdb-staging \  | 
 | 62 | +            --prefix "${{ steps.get_s3_prefix.outputs.s3_prefix }}/" \  | 
 | 63 | +            --max-items 1 \  | 
 | 64 | +            --query 'Contents[0].Key' \  | 
 | 65 | +            --output text) == "None" ]]; then  | 
 | 66 | +            echo "Prefix does not exist: ${{ steps.get_s3_prefix.outputs.s3_prefix }}"  | 
 | 67 | +            echo "${{ github.event_name == 'workflow_run' && 'Possibly built a stable release?' || 'Unexpected error' }}"  | 
 | 68 | +            exit 1  | 
 | 69 | +          fi  | 
 | 70 | +
  | 
 | 71 | +  publish-pypi:  | 
 | 72 | +    name: Publish Artifacts to PyPI  | 
 | 73 | +    needs: [ prepare ]  | 
 | 74 | +    runs-on: ubuntu-latest  | 
 | 75 | +    environment:  | 
 | 76 | +      name: ${{ github.event_name == 'workflow_dispatch' && inputs.environment || 'test.pypi' }}  | 
 | 77 | +    if: ${{ vars.PYPI_URL != '' }}  | 
 | 78 | +    permissions:  | 
 | 79 | +      # this is needed for the OIDC flow that is used with trusted publishing on PyPI  | 
 | 80 | +      id-token: write  | 
 | 81 | +    steps:  | 
 | 82 | +      - name: Authenticate With AWS  | 
 | 83 | +        uses: aws-actions/configure-aws-credentials@v4  | 
 | 84 | +        with:  | 
 | 85 | +          aws-region: 'us-east-2'  | 
 | 86 | +          aws-access-key-id: ${{ secrets.S3_DUCKDB_STAGING_ID }}  | 
 | 87 | +          aws-secret-access-key: ${{ secrets.S3_DUCKDB_STAGING_KEY }}  | 
 | 88 | + | 
 | 89 | +      - name: Download Artifacts From S3  | 
 | 90 | +        env:  | 
 | 91 | +          S3_URL: 's3://duckdb-staging/${{ needs.prepare.outputs.s3_prefix }}/'  | 
 | 92 | +          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}  | 
 | 93 | +          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}  | 
 | 94 | +        run: |  | 
 | 95 | +          mkdir packages  | 
 | 96 | +          aws s3 cp --recursive "${S3_URL}" packages  | 
 | 97 | +
  | 
 | 98 | +      - name: Upload artifacts to PyPI  | 
 | 99 | +        if: ${{ vars.PYPI_URL != '' }}  | 
 | 100 | +        uses: pypa/gh-action-pypi-publish@release/v1  | 
 | 101 | +        with:  | 
 | 102 | +          repository-url: ${{ vars.PYPI_URL }}  | 
 | 103 | +          packages-dir: packages  | 
0 commit comments