Ingest Single Dataset: dcp_pluto 25v3.1 #434
name: Ingest Single Dataset
run-name: "Ingest Single Dataset: ${{ inputs.dataset }} ${{ inputs.version }}"
on:
  workflow_dispatch:
    inputs:
      dataset:
        description: "Name of the dataset (required)"
        required: true
        default: dcp_mappluto
      latest:
        type: boolean
        description: "Tag this version as latest (optional)"
        required: false
        default: true
      version:
        description: "The version of the dataset (e.g. 22v2, 21C) if needed (optional)"
        required: false
      mode:
        description: "Preprocessing 'mode', if applicable"
        required: false
      dev_image:
        description: "Use the dev image specific to this branch (if it exists)? Overridden by custom_dev_image below"
        type: boolean
        required: true
        default: false
      custom_dev_image:
        description: "Manually specify a custom image tag?"
        type: string
        required: false
      dev_bucket:
        description: "Dev bucket to use in place of recipes. If the bucket is named 'de-dev-a', simply put 'a'"
        type: string
        required: false
jobs:
  dataloading:
    runs-on: ubuntu-22.04
    container:
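      # Image tag precedence: an explicit custom_dev_image wins; otherwise, if
      # dev_image is checked, the branch-specific 'dev-<branch>' tag is used;
      # failing both, the tag falls back to 'latest'.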
      image: nycplanning/build-geosupport:${{ inputs.custom_dev_image || (inputs.dev_image && format('dev-{0}', github.head_ref || github.ref_name) || 'latest') }}
    defaults:
      run:
        shell: bash
    env:
      TEMPLATE_DIR: ./ingest_templates
      BUILD_NAME: ${{ github.head_ref || github.ref_name }}
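      # When a dev_bucket is supplied, reads and writes are both redirected to
      # the matching 'de-dev-*' bucket, and DEV_FLAG marks the run as a dev run.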
      RECIPES_BUCKET: ${{ inputs.dev_bucket && format('de-dev-{0}', inputs.dev_bucket) || 'edm-recipes' }}
      PUBLISHING_BUCKET: ${{ inputs.dev_bucket && format('de-dev-{0}', inputs.dev_bucket) || 'edm-publishing' }}
      DEV_FLAG: ${{ inputs.dev_bucket && 'true' || 'false' }}
      SFTP_PRIVATE_KEY_PATH: /github/home/.ssh/id_rsa_ginger # TODO as mentioned elsewhere, this should be configured
    steps:
      - uses: actions/checkout@v4
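      # First 1Password load: export-env is false, so these secrets are exposed
      # only as step outputs (consumed by the SSH setup step below) rather than
      # as environment variables.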
      - name: Load Secrets
        id: op-load-secret
        uses: 1password/load-secrets-action@v1
        with:
          export-env: false
        env:
          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
          SSH_PRIVATE_KEY_GINGER: "op://Data Engineering/DOF_SFTP/private key"
          KNOWN_HOSTS: "op://Data Engineering/DOF_SFTP/text" # should rename this, but 1Password was being weird
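      # ~ resolves to /github/home for container jobs, so the key written here
      # is the file that SFTP_PRIVATE_KEY_PATH above points at.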
      - name: Set up SSH for Ginger
        run: |
          mkdir -p ~/.ssh
          echo "${{ steps.op-load-secret.outputs.SSH_PRIVATE_KEY_GINGER }}" > ~/.ssh/id_rsa_ginger
          echo "${{ steps.op-load-secret.outputs.KNOWN_HOSTS }}" > ~/.ssh/known_hosts
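      # Second 1Password load: export-env is true, so these secrets become
      # environment variables for all subsequent steps.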
      - name: Load Secrets to Env
        uses: 1password/load-secrets-action@v1
        with:
          export-env: true
        env:
          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
          AWS_S3_ENDPOINT: "op://Data Engineering/DO_keys/AWS_S3_ENDPOINT"
          AWS_SECRET_ACCESS_KEY: "op://Data Engineering/DO_keys/AWS_SECRET_ACCESS_KEY"
          AWS_ACCESS_KEY_ID: "op://Data Engineering/DO_keys/AWS_ACCESS_KEY_ID"
          BUILD_ENGINE_SERVER: "op://Data Engineering/EDM_DATA/server_url"
          SOCRATA_USER: "op://Data Engineering/DCP_OpenData/username"
          SOCRATA_PASSWORD: "op://Data Engineering/DCP_OpenData/password"
          SFTP_HOST: "op://Data Engineering/DOF_SFTP/url"
          SFTP_USER: "op://Data Engineering/DOF_SFTP/username"
      - name: Finish container setup
        run: ./bash/docker_container_setup.sh
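      # Each optional input becomes a CLI flag only when it was provided;
      # otherwise the env var is an empty string, which the unquoted shell
      # expansion in the run command drops entirely.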
      - name: Archive dataset
        env:
          latest: ${{ inputs.latest && '--latest' || '' }}
          version: ${{ inputs.version && format('--version {0}', inputs.version) || '' }}
          mode: ${{ inputs.mode && format('--mode {0}', inputs.mode) || '' }}
        run: python3 -m dcpy lifecycle scripts ingest_or_library_archive ${{ inputs.dataset }} $latest $version $mode --push-to-s3
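
For reference, a run like the one titled above could be dispatched from the GitHub CLI. The workflow file name below is an assumption (the repo path is not shown here); substitute whatever this file is actually saved as under .github/workflows/:

    # hypothetical file name; adjust to the actual path of this workflow
    gh workflow run ingest_dataset.yml -f dataset=dcp_pluto -f version=25v3.1 -f latest=true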