diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml deleted file mode 100644 index 32190bf..0000000 --- a/.github/workflows/cicd.yml +++ /dev/null @@ -1,194 +0,0 @@ -name: CI/CD πŸš€ - -permissions: - id-token: write - contents: read - -on: - workflow_dispatch: - inputs: - environment: - type: environment - required: true - description: Environment to deploy to - -run-name: Deploy to ${{ inputs.environment }} environment by @${{ github.actor }} - -jobs: - deploy-veda-auth: - name: Deploy VEDA auth πŸ” - runs-on: ubuntu-latest - env: - DIRECTORY: veda-auth - ENVIRONMENT: ${{ github.event.inputs.environment }} - environment: ${{ github.event.inputs.environment }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - lfs: "true" - submodules: "recursive" - - - name: Checkout to the provided Git Ref - run: | - cd ${{ env.DIRECTORY }} - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - git fetch --tags origin - git checkout ${{ vars.VEDA_AUTH_GIT_REF || 'main'}} - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} - role-session-name: "${{ env.DIRECTORY }}-github-${{ env.ENVIRONMENT }}-auth-deployment" - aws-region: "us-west-2" - - - name: Run auth deployment - id: deploy_auth_stack - uses: "./veda-auth/.github/actions/cdk-deploy" - with: - dir: "${{ env.DIRECTORY }}" - env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} - - - name: Get Auth Stack Name - id: get_auth_stack - shell: bash - run: | - stack=$(jq 'keys_unsorted[0]' ${HOME}/cdk-outputs.json) - echo "auth_stackname=$stack" >> $GITHUB_OUTPUT - - outputs: - auth_stack_name: ${{ steps.get_auth_stack.outputs.auth_stackname }} - - deploy-veda-backend: - name: Deploy VEDA backend βš™οΈ - runs-on: ubuntu-latest - env: - DIRECTORY: veda-backend - ENVIRONMENT: ${{ github.event.inputs.environment }} - needs: [deploy-veda-auth] - environment: ${{ 
github.event.inputs.environment }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - lfs: "true" - submodules: "recursive" - - - name: Checkout to the provided Git Ref - run: | - cd ${{ env.DIRECTORY }} - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - git fetch --tags origin - echo `git checkout ${{ vars.VEDA_BACKEND_GIT_REF || 'main' }}` - git checkout ${{ vars.VEDA_BACKEND_GIT_REF || 'main' }} - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} - role-session-name: "${{ env.DIRECTORY }}-github-${{ env.ENVIRONMENT }}-backend-deployment" - aws-region: "us-west-2" - - - name: Run deployment - uses: "./veda-backend/.github/actions/cdk-deploy" - id: deploy_backend_stack - with: - env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} - auth_stack_name: "${{ needs.deploy-veda-auth.outputs.auth_stack_name }}" - dir: "${{ env.DIRECTORY }}" - script_path: "${{ github.workspace }}/scripts/generate_env_file.py" - - - name: Get Backend Stack Name - id: get_backend_stack - shell: bash - run: | - stack=$(jq 'keys_unsorted[0]' ${HOME}/cdk-outputs.json) - echo "backend_stackname=$stack" >> $GITHUB_OUTPUT - - outputs: - backend_stack_name: ${{ steps.get_backend_stack.outputs.backend_stackname }} - - deploy-veda-data-airflow: - name: deploy VEDA data airflow πŸƒ - runs-on: ubuntu-latest - env: - DIRECTORY: veda-data-airflow - AWS_REGION: "us-west-2" - ENVIRONMENT: ${{ github.event.inputs.environment }} - needs: [deploy-veda-auth, deploy-veda-backend] - environment: ${{ github.event.inputs.environment }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - lfs: "true" - submodules: "recursive" - - - name: Checkout to the provided Git Ref - run: | - cd ${{ env.DIRECTORY }} - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - git fetch --tags origin - echo `git checkout origin/${{ 
vars.VEDA_DATA_AIRFLOW_GIT_REF || 'main' }}` - git checkout origin/${{ vars.VEDA_DATA_AIRFLOW_GIT_REF || 'main'}} - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} - role-session-name: "${{ env.DIRECTORY }}-github-${{ env.ENVIRONMENT }}-airflow-deployment" - aws-region: "${{ env.AWS_REGION }}" - - - name: Run deployment - uses: "./veda-data-airflow/.github/actions/terraform-deploy" - with: - env-file: ".env" - env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} - dir: "${{ env.DIRECTORY }}" - script_path: "${{ github.workspace }}/scripts/generate_env_file.py" - backend_stack_name: "${{ needs.deploy-veda-backend.outputs.backend_stack_name }}" - auth_stack_name: "${{ needs.deploy-veda-auth.outputs.auth_stack_name }}" - - test-deployment: - name: Test Deployment πŸ‘¨πŸ»β€πŸ”¬ - runs-on: ubuntu-latest - needs: [ deploy-veda-backend ] - env: - DIRECTORY: integration_test - ENVIRONMENT: ${{ github.event.inputs.environment }} - AWS_DEFAULT_REGION: us-west-2 - environment: ${{ github.event.inputs.environment }} - steps: - - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - cache: "pip" - cache-dependency-path: | - ${{ github.workspace }}/${{ env.DIRECTORY }}/requirements.txt - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} - role-session-name: "${{ env.DIRECTORY }}-github-${{ env.ENVIRONMENT }}-integration-test" - aws-region: "${{ env.AWS_DEFAULT_REGION }}" - - - name: Install python dependencies - shell: bash - working-directory: ${{ env.DIRECTORY }} - run: | - pip install -r requirements.txt - python "${{ github.workspace }}/scripts/generate_env_file.py" --secret-id ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} --stack-names "${{ needs.deploy-veda-backend.outputs.backend_stack_name }}" - - - name: 
Integration test - shell: bash - working-directory: ${{ env.DIRECTORY }} - run: | - pytest . diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..dd6a10f --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,615 @@ +name: Deploy πŸš€ + +permissions: + id-token: write + contents: read + issues: write + + +on: + workflow_call: + inputs: + environment: + type: string + required: true + description: Environment to deploy to + DEPLOY_AUTH: + type: string + required: true + default: false + description: DEPLOY_AUTH + DEPLOY_BACKEND: + type: string + required: true + default: true + description: DEPLOY_BACKEND + DEPLOY_FEATURES_API: + type: string + required: true + default: false + description: DEPLOY_FEATURES_API + DEPLOY_ROUTES: + type: string + required: true + default: false + description: DEPLOY_ROUTES + DEPLOY_SM2A: + type: string + required: true + default: true + description: DEPLOY_SM2A + DEPLOY_MONITORING: + type: string + required: true + default: false + description: DEPLOY_MONITORING + DEPLOY_TITILER_MULTIDIM: + type: string + required: true + default: false + description: DEPLOY_TITILER_MULTIDIM + DEPLOY_S3_DISASTER_RECOVERY: + type: string + required: true + default: false + description: DEPLOY_S3_DISASTER_RECOVERY + DEPLOY_TITILER_CMR: + type: string + required: true + default: false + description: DEPLOY_TITILER_CMR + +concurrency: + group: ${{ inputs.environment }} + cancel-in-progress: true + +run-name: Deploy to ${{ inputs.environment }} environment by @${{ github.actor }} + +jobs: + + deploy-veda-auth: + name: Deploy VEDA auth πŸ” + runs-on: ubuntu-latest + env: + DIRECTORY: veda-auth + ENVIRONMENT: ${{ inputs.environment }} + environment: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_AUTH == 'true' }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-auth 
submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_AUTH_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-auth-deployment" + aws-region: "us-west-2" + + - name: Run auth deployment + id: deploy_auth_stack + uses: "./veda-auth/.github/actions/cdk-deploy" + with: + dir: "${{ env.DIRECTORY }}" + env_aws_secret_name: ${{ vars.VEDA_AUTH_DEPLOYMENT_SECRET || vars.DEPLOYMENT_ENV_SECRET_NAME }} + + - name: Get Auth Stack Name + id: get_auth_stack + shell: bash + run: | + stack=$(jq 'keys_unsorted[0]' ${HOME}/cdk-outputs.json) + echo "auth_stackname=$stack" >> $GITHUB_OUTPUT + + outputs: + auth_stack_name: ${{ steps.get_auth_stack.outputs.auth_stackname }} + + deploy-veda-backend: + name: Deploy VEDA backend βš™οΈ + runs-on: ubuntu-latest + env: + DIRECTORY: veda-backend + ENVIRONMENT: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_BACKEND == 'true' }} + environment: ${{ inputs.environment }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-backend submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_BACKEND_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-backend-deployment" + aws-region: "us-west-2" + + - name: Run deployment + uses: 
"./veda-backend/.github/actions/cdk-deploy" + id: deploy_backend_stack + with: + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + - name: Get Backend Output Values + id: get_backend_stack + shell: bash + run: | + stack=$(jq 'keys_unsorted[0]' ${HOME}/cdk-outputs.json) + echo "backend_stackname=$stack" >> $GITHUB_OUTPUT + + raster_api_url=$(jq '.[keys_unsorted[0]].rasterapiurl' ${HOME}/cdk-outputs.json) + echo "raster_api_url=$raster_api_url" >> $GITHUB_OUTPUT + + ingest_api_url=$(jq '.[keys_unsorted[0]].ingestapiurl' ${HOME}/cdk-outputs.json) + echo "ingest_api_url=$ingest_api_url" >> $GITHUB_OUTPUT + + stac_api_url=$(jq '.[keys_unsorted[0]].stacapiurl' ${HOME}/cdk-outputs.json) + echo "stac_api_url=$stac_api_url" >> $GITHUB_OUTPUT + + stac_browser_bucket_name=$(jq '.[keys_unsorted[0]].stacbrowserbucketname' ${HOME}/cdk-outputs.json) + echo "stac_browser_bucket_name=$stac_browser_bucket_name" >> $GITHUB_OUTPUT + + outputs: + backend_stack_name: ${{ steps.get_backend_stack.outputs.backend_stackname }} + raster_api_url: ${{ steps.get_backend_stack.outputs.raster_api_url }} + ingest_api_url: ${{ steps.get_backend_stack.outputs.ingest_api_url }} + stac_api_url: ${{ steps.get_backend_stack.outputs.stac_api_url }} + stac_browser_bucket_name: ${{ steps.get_backend_stack.outputs.stac_browser_bucket_name }} + + deploy-veda-data-airflow-sm2a: + name: deploy VEDA data airflow SM2A πŸ›Έ + runs-on: ubuntu-latest + env: + DIRECTORY: veda-data-airflow + AWS_REGION: "us-west-2" + ENVIRONMENT: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_SM2A == 'true' }} + environment: ${{ inputs.environment }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-data-airflow submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 
+ with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_SM2A_DATA_AIRFLOW_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-airflow-sm2a-deployment" + aws-region: "${{ env.AWS_REGION }}" + + - name: Run SM2A deployment + id: deploy_sm2a + uses: "./veda-data-airflow/.github/actions/terraform-deploy-sm2a" + with: + env-file: ".env" + env_aws_secret_name: ${{ vars.SM2A_ENVS_DEPLOYMENT_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + - name: Get SM2A Workflows API Endpoint + id: get_sm2a_workflows_api_endpoint + shell: bash + run: | + workflows_sm2a_api_value=$(cat ${HOME}/output_sm2a_workflows_endpoint.json 2>/dev/null | tr -d '"' | tr -d '\n' ) + echo "workflows_sm2a_api_value=${workflows_sm2a_api_value:-NA}" >> $GITHUB_OUTPUT + + outputs: + workflows_sm2a_api_value: ${{ steps.get_sm2a_workflows_api_endpoint.outputs.workflows_sm2a_api_value }} + + deploy-veda-features-api: + name: Deploy VEDA features-api πŸ—ΊοΈ + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_FEATURES_API == 'true' }} + env: + DIRECTORY: veda-features-api-cdk + ENVIRONMENT: ${{ inputs.environment }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-features-api submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_FEATURES_API_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 
#v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-features-deployment" + aws-region: "us-west-2" + + - name: Run deployment + uses: "./veda-features-api-cdk/.github/actions/cdk-deploy" + id: deploy_features_stack + with: + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + - name: Get Features API Stack Name + id: get_features_api_stack + shell: bash + run: | + stack=$(jq 'keys_unsorted[0]' ${HOME}/cdk-outputs.json) + echo "features_api_stackname=$stack" >> $GITHUB_OUTPUT + + outputs: + features_api_stack_name: ${{ steps.get_features_api_stack.outputs.features_api_stackname }} + + deploy-veda-monitoring: + name: deploy VEDA monitoring πŸ‘οΈ + runs-on: ubuntu-latest + env: + DIRECTORY: veda-monitoring + AWS_REGION: "us-west-2" + ENVIRONMENT: ${{ inputs.environment }} + GH_PAT_CHECK: ${{ secrets.GH_PAT }} + if: ${{ inputs.DEPLOY_MONITORING == 'true' }} + environment: ${{ inputs.environment }} + + steps: + - name: Checkout + if: ${{ env.GH_PAT_CHECK != '' }} + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-monitoring "submodule" + if: ${{ env.GH_PAT_CHECK != '' }} + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_MONITORING_GIT_REF || 'main' }} + token: ${{ secrets.GH_PAT }} + + - name: Configure AWS Credentials + if: ${{ env.GH_PAT_CHECK != '' }} + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-monitoring-deployment" + aws-region: "${{ env.AWS_REGION }}" + + - name: Run deployment + if: ${{ env.GH_PAT_CHECK != '' 
}} + uses: "./veda-monitoring/.github/actions/cdk-deploy" + with: + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + deploy-titiler-multidim: + name: Deploy titiler-multidim 🌎 + runs-on: ubuntu-latest + env: + DIRECTORY: titiler-multidim + ENVIRONMENT: ${{ inputs.environment }} + environment: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_TITILER_MULTIDIM == 'true' }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout titiler-multidim submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "developmentseed/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.TITILER_MULTIDIM_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-titiler-multidim-deployment" + aws-region: "us-west-2" + + - name: Run titiler-multidim deployment + id: deploy_titiler_multidim + uses: "./titiler-multidim/.github/actions/cdk-deploy" + with: + dir: "${{ env.DIRECTORY }}/infrastructure/aws" + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + deploy-s3-disaster-recovery: + name: Deploy s3-disaster-recovery + runs-on: ubuntu-latest + env: + DIRECTORY: s3-disaster-recovery + ENVIRONMENT: ${{ inputs.environment }} + environment: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_S3_DISASTER_RECOVERY == 'true' }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout s3-disaster-recovery submodule + 
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.S3_DISASTER_RECOVERY_GIT_REF || 'main'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-s3-disaster-recovery-deployment" + aws-region: "us-west-2" + + - name: Run s3-disaster-recovery deployment + id: deploy_s3-disaster-recovery + uses: "./s3-disaster-recovery/.github/workflows" + with: + env_file: ".env" + aws_region: ${{ vars.AWS_REGION }} + allow_batch_replication: ${{ vars.VEDA_S3_DISASTER_RECOV_ALLOW_BATCH_REPLICATION }} + source_bucket: ${{ vars.VEDA_S3_DISASTER_RECOV_SOURCE_BUCKET_NAME }} + destination_bucket: ${{ vars.VEDA_S3_DISASTER_RECOV_DESTINATION_BUCKET_NAME }} + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + deploy-titiler-cmr: + name: Deploy titiler-cmr πŸ“š + runs-on: ubuntu-latest + env: + DIRECTORY: titiler-cmr + ENVIRONMENT: ${{ inputs.environment }} + environment: ${{ inputs.environment }} + if: ${{ inputs.DEPLOY_TITILER_CMR == 'true' }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + # curious why this is needed + lfs: "true" + submodules: "false" + + - name: Checkout titiler-cmr submodule + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "developmentseed/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.TITILER_CMR_GIT_REF || 'develop'}} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + 
role-session-name: "gh-${{ env.ENVIRONMENT }}-titiler-cmr-deployment" + aws-region: "us-west-2" + + - name: Run titiler-cmr deployment + id: deploy_titiler_cmr + uses: "./titiler-cmr/.github/actions/cdk-deploy" + with: + dir: "${{ env.DIRECTORY }}/infrastructure/aws" + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} + script_path: "${{ github.workspace }}/scripts/generate_env_file.py" + + deploy-veda-routes: + name: deploy VEDA routes πŸ”€ + runs-on: ubuntu-latest + env: + DIRECTORY: veda-routes + AWS_REGION: "us-west-2" + # Use the `inputs` context: this file is a reusable workflow (workflow_call); + # `github.event.inputs` only resolves when the caller's event happens to carry + # identically-named inputs, which breaks for any other caller. + ENVIRONMENT: ${{ inputs.environment }} + needs: [ deploy-veda-backend ] + if: ${{ inputs.DEPLOY_ROUTES == 'true' }} + environment: ${{ inputs.environment }} + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + lfs: "true" + submodules: "false" + + - name: Checkout veda-routes "submodule" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: "NASA-IMPACT/${{ env.DIRECTORY }}" + path: ${{ env.DIRECTORY }} + ref: ${{ vars.VEDA_ROUTES_GIT_REF || 'dev' }} + token: ${{ secrets.GH_PAT }} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-routes-deployment" + aws-region: "${{ env.AWS_REGION }}" + + - name: Set up Python + uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0 + with: + python-version: "3.10" + + - name: Setup python cache + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + with: + path: ${{ env.pythonLocation }} + # hashFiles() cannot contain a nested `${{ }}` template — the inner template + # is treated as a literal string and never expands, so the old key hashed + # nothing; build the path with format() and bare context references instead. + key: ${{ env.pythonLocation }}-${{ hashFiles(format('{0}/{1}/requirements.txt', github.workspace, env.DIRECTORY)) }} + + - name: Install python dependencies + working-directory: ${{ env.DIRECTORY }} + shell: bash + run: | + pip install -r 
../requirements.txt + pip install -r requirements.txt + + - name: Update Veda Routes secrets with dependency outputs + shell: bash + run: | + python3 "${{ github.workspace }}/scripts/update_secret_with_inputs.py" \ + --secret-id ${{ vars.DEPLOYMENT_ENV_ROUTES_SECRET_NAME }} \ + --prefix=VEDA_ \ + --raster_api_url=${{ needs.deploy-veda-backend.outputs.raster_api_url }} \ + --ingest_api_url=${{ needs.deploy-veda-backend.outputs.ingest_api_url }} \ + --stac_api_url=${{ needs.deploy-veda-backend.outputs.stac_api_url }} \ + --stac_browser_bucket_name=${{ needs.deploy-veda-backend.outputs.stac_browser_bucket_name }} + + - name: Run deployment + uses: "./veda-routes/.github/actions/cdk-deploy" + with: + env_aws_secret_name: ${{ vars.DEPLOYMENT_ENV_ROUTES_SECRET_NAME }} + dir: "${{ env.DIRECTORY }}" + + + test-deployment: + name: Test Deployment πŸ‘¨πŸ»β€πŸ”¬ + runs-on: ubuntu-latest + timeout-minutes: 15 # Normally takes ~30s + needs: [ deploy-veda-backend ] + env: + DIRECTORY: integration_test + ENVIRONMENT: ${{ inputs.environment }} + AWS_DEFAULT_REGION: us-west-2 + environment: ${{ inputs.environment }} + steps: + - uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 #v2.7.0 + + - name: Set up Python + uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0 + with: + python-version: "3.10" + cache: "pip" + cache-dependency-path: | + ${{ github.workspace }}/${{ env.DIRECTORY }}/requirements.txt + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 #v4.1.0 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_ROLE_ARN }} + role-session-name: "gh-${{ env.ENVIRONMENT }}-integration-test" + aws-region: "${{ env.AWS_DEFAULT_REGION }}" + + - name: Install python dependencies + shell: bash + working-directory: ${{ env.DIRECTORY }} + run: | + pip install -r requirements.txt + python "${{ github.workspace }}/scripts/generate_env_file.py" --secret-id ${{ 
vars.DEPLOYMENT_ENV_SECRET_NAME }} --stack-names "${{ needs.deploy-veda-backend.outputs.backend_stack_name }}" + + - name: Integration test + shell: bash + working-directory: ${{ env.DIRECTORY }} + run: | + pytest . + + run-playwright-checks: + name: playwright end to end tests + # The old condition referenced `needs.define-environment`, a job that does not + # exist (and is not in `needs:`), so the expression was always empty and this + # job could never run; gate on the real upstream job's output instead. + if: ${{ needs.deploy-veda-backend.outputs.backend_stack_name }} + runs-on: ubuntu-latest + needs: [ deploy-veda-backend ] + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: NASA-IMPACT/veda-config + ref: add-playwright + + - name: Use Node.js 16 + uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2 + with: + node-version: 16 + + - name: Run veda setup + run: ./.veda/setup + + - name: Checkout generate_env script + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + sparse-checkout: | + /scripts/generate_env_file.py + sparse-checkout-cone-mode: false + + - name: Generate .env file from cdk.out + shell: bash + # NOTE(review): env.DIRECTORY is not defined in this job (copied from + # test-deployment?) — confirm the intended working directory. + working-directory: ${{ env.DIRECTORY }} + run: | + pip install -r requirements.txt + python "${{ github.workspace }}/scripts/generate_env_file.py" --secret-id ${{ vars.DEPLOYMENT_ENV_SECRET_NAME }} --stack-names "${{ needs.deploy-veda-backend.outputs.backend_stack_name }}" + + - name: Load .env file + id: dotenv + # NOTE(review): dotenv-action may lower-case its output keys by default — + # confirm `steps.dotenv.outputs.VEDA_RASTER_URL` resolves as written below. + uses: falti/dotenv-action@a33be0b8cf6a6e6f1b82cc9f3782061ab1022be5 #v1.1.4 + + - name: Create env.local file + env: + MAPBOX_TOKEN: ${{secrets.MAPBOX_TOKEN}} + run: | + touch .env.local + echo "API_RASTER_ENDPOINT=${{steps.dotenv.outputs.VEDA_RASTER_URL}}" >> .env.local + echo "API_STAC_ENDPOINT=${{steps.dotenv.outputs.VEDA_STAC_URL}}" >> .env.local + + echo "Testing deployment URLs" + cat .env.local + + # Previous form had unbalanced quotes ("MAPBOX_TOKEN="$MAPBOX_TOKEN" >> ...), + # which swallowed the redirect into a quoted string and never wrote the token. + echo "MAPBOX_TOKEN=$MAPBOX_TOKEN" >> .env.local + + - name: Install Playwright Browsers + run: yarn playwright install --with-deps + + - name: Playwright tests + run: yarn test:e2e + - uses: actions/upload-artifact@c24449f33cd45d4826c6702db7e49f7cdb9b551d 
#v3.2.1-node20 + if: always() + with: + name: playwright-report + path: playwright-report/ + retention-days: 30 + diff --git a/.github/workflows/dispatch.yml b/.github/workflows/dispatch.yml new file mode 100644 index 0000000..8f476b3 --- /dev/null +++ b/.github/workflows/dispatch.yml @@ -0,0 +1,124 @@ +name: Dispatch β›Ÿ + +permissions: + id-token: write + contents: write + issues: write + +on: + workflow_dispatch: + inputs: + environment: + type: environment + required: true + description: Environment to deploy to + DEPLOY_AUTH: + type: boolean + required: true + default: false + description: DEPLOY_AUTH + DEPLOY_BACKEND: + type: boolean + required: true + default: true + description: DEPLOY_BACKEND + DEPLOY_FEATURES_API: + type: boolean + required: true + default: false + description: DEPLOY_FEATURES_API + DEPLOY_ROUTES: + type: boolean + required: true + default: false + description: DEPLOY_ROUTES + DEPLOY_SM2A: + type: boolean + required: true + default: false + description: DEPLOY_SM2A + DEPLOY_MONITORING: + type: boolean + required: true + default: false + description: DEPLOY_MONITORING + DEPLOY_TITILER_MULTIDIM: + type: boolean + required: true + default: false + description: DEPLOY_TITILER_MULTIDIM + DEPLOY_S3_DISASTER_RECOVERY: + type: boolean + required: true + default: false + description: DEPLOY_S3_DISASTER_RECOVERY + DEPLOY_TITILER_CMR: + type: boolean + required: true + default: false + description: DEPLOY_TITILER_CMR + +run-name: | + Dispatch to ${{ inputs.environment }} + AUTH=${{ inputs.DEPLOY_AUTH }} + BACKEND=${{ inputs.DEPLOY_BACKEND }} + FEATURES=${{ inputs.DEPLOY_FEATURES_API }} + ROUTES=${{ inputs.DEPLOY_ROUTES }} + SM2A=${{ inputs.DEPLOY_SM2A }} + MONITORING=${{ inputs.DEPLOY_MONITORING }} + TITILER_MULTIDIM=${{ inputs.DEPLOY_TITILER_MULTIDIM }} + TITILER_CMR=${{ inputs.DEPLOY_TITILER_CMR }} β›Ÿ + +env: + DEPLOY_AUTH: ${{ github.event.inputs.DEPLOY_AUTH}} + DEPLOY_BACKEND: ${{ github.event.inputs.DEPLOY_BACKEND }} + DEPLOY_FEATURES_API: ${{ 
github.event.inputs.DEPLOY_FEATURES_API }} + DEPLOY_ROUTES: ${{ github.event.inputs.DEPLOY_ROUTES }} + DEPLOY_SM2A: ${{ github.event.inputs.DEPLOY_SM2A }} + DEPLOY_MONITORING: ${{ github.event.inputs.DEPLOY_MONITORING }} + DEPLOY_TITILER_MULTIDIM: ${{ github.event.inputs.DEPLOY_TITILER_MULTIDIM }} + DEPLOY_S3_DISASTER_RECOVERY: ${{ github.event.inputs.DEPLOY_S3_DISASTER_RECOVERY }} + DEPLOY_TITILER_CMR: ${{ github.event.inputs.DEPLOY_TITILER_CMR }} + +jobs: + check-environment: + runs-on: ubuntu-latest + name: Got ${{ github.event.inputs.environment }} + steps: + - name: Validation + uses: trstringer/manual-approval@v1 + if: ${{ github.event.inputs.environment == 'ghgc-mcp-production-blue' || github.event.inputs.environment == 'mcp-prod' }} + timeout-minutes: 60 # The approver will have 1 hour to approve this request + # Why 1h? Because GitHub App tokens expire after 1 hour which implies duration + # for the approval cannot exceed 60 minutes or the job will fail due to bad credentials + with: + secret: ${{ secrets.GITHUB_TOKEN }} + approvers: amarouane-ABDELHAK,slesaad,anayeaye,smohiudd,botanical,ividito,stephenkilbourn + minimum-approvals: 1 + issue-title: "Deploying to ${{ github.event.inputs.environment }}" + issue-body: "Please approve or deny the deployment" + + deploy-veda-components: + name: Deploy VEDA Components + uses: "./.github/workflows/deploy.yml" + needs: check-environment + with: + environment: ${{ github.event.inputs.environment }} + DEPLOY_AUTH: ${{ github.event.inputs.DEPLOY_AUTH}} + DEPLOY_BACKEND: ${{ github.event.inputs.DEPLOY_BACKEND }} + DEPLOY_FEATURES_API: ${{ github.event.inputs.DEPLOY_FEATURES_API }} + DEPLOY_ROUTES: ${{ github.event.inputs.DEPLOY_ROUTES }} + DEPLOY_SM2A: ${{ github.event.inputs.DEPLOY_SM2A }} + DEPLOY_MONITORING: ${{ github.event.inputs.DEPLOY_MONITORING }} + DEPLOY_TITILER_MULTIDIM: ${{ github.event.inputs.DEPLOY_TITILER_MULTIDIM }} + DEPLOY_S3_DISASTER_RECOVERY: ${{ github.event.inputs.DEPLOY_S3_DISASTER_RECOVERY }} + 
DEPLOY_TITILER_CMR: ${{ github.event.inputs.DEPLOY_TITILER_CMR }} + secrets: inherit + + update-deployment-status: + name: Update Deployment Status + uses: "./.github/workflows/update_deployment_status.yml" + needs: deploy-veda-components + with: + environment: ${{ github.event.inputs.environment }} + secrets: inherit diff --git a/.github/workflows/update_deployment_status.yml b/.github/workflows/update_deployment_status.yml new file mode 100644 index 0000000..1d14cdc --- /dev/null +++ b/.github/workflows/update_deployment_status.yml @@ -0,0 +1,59 @@ +name: Update Deployment Status + +on: + workflow_call: + inputs: + environment: + required: true + type: string + +permissions: + contents: write + +jobs: + update-status-md: + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + + steps: + - name: Checkout deployment-state branch + uses: actions/checkout@v4 + with: + ref: deployment-state + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.x + + - name: Run update script + run: | + python scripts/update_deployment_status.py \ + --env "${{ inputs.environment }}" + env: + VEDA_AUTH_GIT_REF: ${{ vars.VEDA_AUTH_GIT_REF }} + VEDA_BACKEND_GIT_REF: ${{ vars.VEDA_BACKEND_GIT_REF }} + VEDA_FEATURES_API_GIT_REF: ${{ vars.VEDA_FEATURES_API_GIT_REF }} + VEDA_ROUTES_GIT_REF: ${{ vars.VEDA_ROUTES_GIT_REF }} + VEDA_SM2A_DATA_AIRFLOW_GIT_REF: ${{ vars.VEDA_SM2A_DATA_AIRFLOW_GIT_REF }} + VEDA_MONITORING_GIT_REF: ${{ vars.VEDA_MONITORING_GIT_REF }} + VEDA_TITILER_MULTIDIM_GIT_REF: ${{ vars.VEDA_TITILER_MULTIDIM_GIT_REF }} + VEDA_S3_DISASTER_RECOVERY_GIT_REF: ${{ vars.VEDA_S3_DISASTER_RECOVERY_GIT_REF }} + VEDA_TITILER_CMR_GIT_REF: ${{ vars.VEDA_TITILER_CMR_GIT_REF }} + DEPLOY_BACKEND: ${{ github.event.inputs.DEPLOY_BACKEND }} + DEPLOY_AUTH: ${{ github.event.inputs.DEPLOY_AUTH }} + DEPLOY_FEATURES_API: ${{ github.event.inputs.DEPLOY_FEATURES_API }} + DEPLOY_ROUTES: ${{ github.event.inputs.DEPLOY_ROUTES 
}} + DEPLOY_SM2A: ${{ github.event.inputs.DEPLOY_SM2A }} + DEPLOY_MONITORING: ${{ github.event.inputs.DEPLOY_MONITORING }} + DEPLOY_TITILER_MULTIDIM: ${{ github.event.inputs.DEPLOY_TITILER_MULTIDIM }} + DEPLOY_S3_DISASTER_RECOVERY: ${{ github.event.inputs.DEPLOY_S3_DISASTER_RECOVERY }} + DEPLOY_TITILER_CMR: ${{ github.event.inputs.DEPLOY_TITILER_CMR }} + - name: Commit and push updated deployment-status.md + run: | + git config user.name "github-actions" + git config user.email "github-actions@github.com" + git add deployment-status.md + git commit -m "Update deployment refs for ${{ inputs.environment }}" + git push origin deployment-state diff --git a/.gitmodules b/.gitmodules index 31265d2..6fd90a8 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,3 +7,21 @@ [submodule "veda-backend"] path = veda-backend url = https://github.com/NASA-IMPACT/veda-backend.git +[submodule "veda-monitoring"] + path = veda-monitoring + url = https://github.com/NASA-IMPACT/veda-monitoring.git +[submodule "veda-features-api-cdk"] + path = veda-features-api-cdk + url = git@github.com:NASA-IMPACT/veda-features-api-cdk.git +[submodule "veda-routes"] + path = veda-routes + url = git@github.com:NASA-IMPACT/veda-routes.git +[submodule "titiler-multidim"] + path = titiler-multidim + url = https://github.com/developmentseed/titiler-multidim.git +[submodule "s3-disaster-recovery"] + path = s3-disaster-recovery + url = https://github.com/NASA-IMPACT/s3-disaster-recovery.git +[submodule "titiler-cmr"] + path = titiler-cmr + url = https://github.com/developmentseed/titiler-cmr.git diff --git a/README.md b/README.md index 501df91..9a59ff4 100644 --- a/README.md +++ b/README.md @@ -3,31 +3,46 @@ Deploy full VEDA stack easily. # How to deploy? ## Steps -1. Create an Environment in the repository. See [Requirements](#requirements) on details of creating the environment. +1. Create a new Github Environment in the repository. See [Requirements](#requirements) on details of creating the environment. 2. 
Add necessary env vars in the Environment -3. Go to Actions. Select "CI/CD" workflow. Select "Run workflow", choose the environment from step 1. Click "Run workflow." +3. Go to Actions. Select "Dispatch" workflow. Select "Run workflow", choose the environment from step 1. Select the components to dispatch and then "Run workflow." +4. (Optional) To add a new component in veda-deploy see [Add New Components](#add-new-components). # Requirements -## Environment -Each environment needs a minimum of -### Secrets +Adding new deployment environments requires admin permissions for this veda-deploy repository. New environments are added by entering project settings and selecting `Environments` from the code and automation menu. The environment naming convention is `-`, i.e. `smce-staging`. As more environments are added, this convention will need to be updated. + +## GitHub Environment +Each veda-deploy Github Environment needs Environment Secrets and Variables configured in the GitHub UI Settings for this veda-deploy project as well as detailed key-value AWS Secrets Manager secret(s) with configuration for the deployment of all components. + +### GitHub Environment Secrets +GitHub Environment secret(s) configured in the GitHub UI settings for this veda-deploy repo: `DEPLOYMENT_ROLE_ARN` - oidc role with permissions to deploy -### Variables -`DEPLOYMENT_ENV_SECRET_NAME` - the AWS secrets manager secret name with the required env vars. See AWS Secrets Requirements for what env vars are needed. -`PROJECT_PREFIX` (TBD) -`STAGE` (TBD) +### GitHub Environment Variables +GitHub Environment variables need to be set in the GitHub UI project settings. There should be one variable for each AWS Secrets Manager secret name. There should be one variable for each component indicating which GitHub reference to use to deploy that component via checking out that Github reference in the git submodule. + +More instructions on these Github environment variables is provided below. 
+ +#### AWS Secrets Manager Secret Name(s) + +`DEPLOYMENT_ENV_SECRET_NAME` - the AWS secrets manager secret name with the required component env vars. See [AWS Secrets Requirements](#aws-secrets-requirements) for what env vars are needed. Note that the individual submodule GitHub repositories should be consulted for the most up to date environment variable names and explanations. -### Variables (Optional) -Git Ref for each project to use to deploy. Can be branch name, release tag or commit hash. Anything that works with `git checkout`. +`SM2A_ENVS_DEPLOYMENT_SECRET_NAME` - the AWS secrets manager secret name with env vars specific to a SM2A deployment. See [AWS Secrets Requirements for SM2A](#aws-secrets-requirements-for-sm2a) for what env vars are needed. -`VEDA_AUTH_GIT_REF` -`VEDA_BACKEND_GIT_REF` -`VEDA_DATA_AIRFLOW_GIT_REF` -`VEDA_FEATURES_API_GIT_REF` +#### GitHub References +Git Ref for each project to use to deploy. Can be branch name, release tag or commit hash. Anything that works with `git checkout`. Below are some examples of the components that may be configured in a GitHub Environment. + +```bash +VEDA_AUTH_GIT_REF= +VEDA_BACKEND_GIT_REF= +VEDA_FEATURES_API_GIT_REF= +VEDA_SM2A_DATA_AIRFLOW_GIT_REF= +``` #### AWS Secrets Requirements +A single secret is used to store the configuration for all components for a given GitHub Environment. In some cases, an additional secret may be needed if a component does not have uniquely namespaced `.env` parameters and requires custom values--for example, the Self Managed Apache Airflow (SM2A) component requires a separate [SM2A secret](#aws-secrets-requirements-for-sm2a) in the AWS Secrets Manager. 
+ ```bash AWS_ACCOUNT_ID=****** AWS_REGION=****** @@ -43,4 +58,88 @@ STATE_BUCKET_KEY=***** STATE_DYNAMO_TABLE=***** VEDA_STAC_PATH_PREFIX=***** VEDA_RASTER_PATH_PREFIX=***** -``` \ No newline at end of file +``` + +#### AWS Secrets Requirements for SM2A +```bash +AIRFLOW_UID=****** +PREFIX=****** +VPC_ID=****** +STATE_BUCKET_NAME=****** +STATE_BUCKET_KEY=****** +STATE_DYNAMO_TABLE=****** +PRIVATE_SUBNETS_TAGNAME=****** +PUBLIC_SUBNETS_TAGNAME=****** +AIRFLOW_FERNET_KEY=****** +AIRFLOW_DB_NAME=****** +AIRFLOW_DB_USERNAME=****** +AIRFLOW_DB_PASSWORD=****** +PERMISSION_BOUNDARIES_ARN=****** +DOMAIN_NAME=****** +STAGE=****** +TF_VAR_gh_app_client_id=****** +TF_VAR_gh_app_client_secret=****** +TF_VAR_gh_team_name=****** +TF_VAR_subdomain=****** +``` + + + +# Add New Components +> [!IMPORTANT] +> This section is intended to add a new component to an existing configured environment, see [How to Deploy](#how-to-deploy) to start from scratch. Please read the full overview before starting; some steps overlap. 
+ +## Overview +- [Add deployment action to component github repository](#add-deployment-action-to-component-github-repository) + +- [Store `.env` configuration in AWS Secrets Manager](#store-env-configuration-in-aws-secrets-manager) + +- [Add component submodule to veda-deploy](#add-component-submodule-to-veda-deploy) + +- [Extend composite dispatched deployment action with an optional component job that uses the component submodule and environment secret](#extend-composite-dispatched-deployment-action) + +- [Add new component release version and environment secret name to veda-deploy environment(s)](#add-new-component-release-version-and-environment-secret-name-to-veda-deploy-environments) + +- [Configure domain and custom routes](#configure-domain-and-custom-routes) + +## Add deployment action to component github repository +Dispatches from veda-deploy are composed of deployment actions imported from the component projects' repository, after it has been installed as [git submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules). The management of all configuration, testing, and deployment concerns is managed within the component's GitHub repository (not in veda-deploy). + +Create a new `cdk-deploy/action.yml` in the component project's repository. On a dispatch, the configured release version of the project will be checked out and executed on the veda-deploy GitHub runner. + +To keep the components modular, each action should include all necessary steps for deployment including Python and Node setup steps. While veda-deploy uses the same runner to deploy all components, it should not be assumed that the runner already has all needed installations and environment configuration from other components (unless a dependency is configured for the job using needs: {upstream-job-name}). 
+ +> [!TIP] +> Most deployments require [custom environment configuration](#store-env-configuration-in-aws-secrets-manager) that can be retrieved from the AWS Secrets Manager for the deployment. See [veda-backend/scripts/get-env.sh](https://github.com/NASA-IMPACT/veda-backend/blob/develop/scripts/get-env.sh) for an example environment configuration utility. + +### Examples +- Veda-auth [cdk-deploy/action.yml](https://github.com/NASA-IMPACT/veda-auth/blob/main/.github/actions/cdk-deploy/action.yml) provides a simple example of adding configuration from an AWS Secrets Manager secret and running `cdk deploy` for an imported submodule. +- Veda-backend [cdk-deploy/action.yml](https://github.com/NASA-IMPACT/veda-backend/blob/develop/.github/actions/cdk-deploy/action.yml) contains logic to run tests before deploying components. +- This [CICD workflow in veda-backend](https://github.com/NASA-IMPACT/veda-backend/blob/develop/.github/workflows/cicd.yml) demonstrates importing the cdk-deploy/action on a merge event to test the deployment in a dev environment. + +## Store `.env` configuration in AWS Secrets Manager +AWS environment specific configuration like VPC ID and a Permission Boundary Policy Name are already included in a core key-value secret that can be loaded into the GitHub runner environment by your action. This core secret is set in the GitHub Variable `DEPLOYMENT_ENV_SECRET_NAME` (See [AWS Secrets Requirements](#aws-secrets-requirements) for the core variable names). Additional required configuration variables should be added to this core secret as needed for the new component. If your component requires custom configuration that conflicts with the core secret, a new secret can be configured--see the implementation of a custom secret for [SM2A](#aws-secrets-requirements-for-sm2a). + +> [!NOTE] +> 1. For higher security environments, a permissions boundary policy needs to be identified. +> 2. 
The qualifier of the CDK Toolkit bootstrapped for the target environment must be provided if not using the default toolkit. + +## Add component submodule to veda-deploy +Add your component submodule to [.gitmodules](https://github.com/NASA-IMPACT/veda-deploy/blob/dev/.gitmodules). Submodules are checked out on the GitHub runner when your component is deployed. + +``` +[submodule "my-project"] + path = my-project + url = git@github.com:NASA-IMPACT/my-project.git +``` + +## Extend composite dispatched deployment action + +1. Add a [dispatch flag in .github/workflows/dispatch.yml](https://github.com/NASA-IMPACT/veda-deploy/blob/dev/.github/workflows/dispatch.yml#L66) for the component you are adding. As in `DEPLOY_MY_COMPONENT: ${{ github.event.inputs.DEPLOY_MY_COMPONENT }}` +2. Update the [dispatch message](https://github.com/NASA-IMPACT/veda-deploy/blob/dev/.github/workflows/dispatch.yml#L46) to include your component. Eventually this will get too long and will need some thought but it is currently helpful to filter the actions and identify specific dispatches. +3. Transfer the above dispatch information to the [.github/workflows/deploy.yml workflow_call](https://github.com/NASA-IMPACT/veda-deploy/blob/dev/.github/workflows/deploy.yml#L10). The deploy action is called after the environment is set by the dispatch and for production environments is only executed after the dispatch has been approved by a maintainer. +4. Add a new [named job to deploy.yml](https://github.com/NASA-IMPACT/veda-deploy/blob/dev/.github/workflows/deploy.yml#L218) that checks the deployment condition for the component and, when true, checks out the deployment action from the component's GitHub repository and passes in any relevant information like the configuration environment secret name. + + +## Configure domain and custom routes +VEDA platform components include options for custom subdomains and custom root paths. 
Coordinate how your custom resource should be configured with the team maintaining the target environment you are deploying to. \ No newline at end of file diff --git a/integration_test/test_api_health.py b/integration_test/test_api_health.py index fd44927..9530ba7 100644 --- a/integration_test/test_api_health.py +++ b/integration_test/test_api_health.py @@ -4,18 +4,93 @@ load_dotenv() +def _get_link(obj: dict, rel: str) -> str: + """get rel link from a stac object""" + return next((l for l in obj.get("links") if l["rel"]==rel), None) def test_stac_url_returns_200(): - endpoint = os.getenv("VEDA_STAC_URL") - stac_path_prefix = os.getenv("VEDA_STAC_PATH_PREFIX") - url = f"{endpoint.rstrip('/')}/{stac_path_prefix}/_mgmt/ping" - response = requests.get(url) - assert response.status_code == 200 + base_url = os.getenv("VEDA_STAC_URL") + stac_root_path = os.getenv("VEDA_STAC_ROOT_PATH") + custom_host = os.getenv("VEDA_CUSTOM_HOST", None) + disable_default_apigw = os.getenv("VEDA_DISABLE_DEFAULT_APIGW_ENDPOINT", False) + health_endpoint = "_mgmt/ping" + + if not disable_default_apigw: + url = f"{base_url}{health_endpoint}" # APIGW base url includes trailing / + print(f"Checking APIGW stac-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + + if custom_host: + url = f"https://{custom_host}/{stac_root_path.rstrip('/')}/{health_endpoint}" + print(f"Checking custom host stac-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + def test_raster_url_returns_200(): - endpoint = os.getenv("VEDA_RASTER_URL") - raster_path_prefix = os.getenv("VEDA_RASTER_PATH_PREFIX") - url = f"{endpoint.rstrip('/')}/{raster_path_prefix}/healthz" - response = requests.get(url) - assert response.status_code == 200 + base_url = os.getenv("VEDA_RASTER_URL") + raster_root_path = os.getenv("VEDA_RASTER_ROOT_PATH") + custom_host = os.getenv("VEDA_CUSTOM_HOST", None) + disable_default_apigw = os.getenv("VEDA_DISABLE_DEFAULT_APIGW_ENDPOINT", 
False) + health_endpoint = "healthz" + + if not disable_default_apigw: + url = os.path.join(base_url, health_endpoint) + print(f"Checking APIGW raster-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + + if custom_host: + url = f"https://{custom_host}/{raster_root_path.rstrip('/')}/{health_endpoint}" + print(f"Checking custom host raster-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + +def test_stac_item_next_link_returns_200(): + base_url = os.getenv("VEDA_STAC_URL") + stac_root_path = os.getenv("VEDA_STAC_ROOT_PATH") + custom_host = os.getenv("VEDA_CUSTOM_HOST", None) + disable_default_apigw = os.getenv("VEDA_DISABLE_DEFAULT_APIGW_ENDPOINT", False) + collections_endpoint = "collections" + + if not disable_default_apigw: + url = f"{base_url}/{collections_endpoint}" + print(f"Checking APIGW stac-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + + if custom_host: + url = f"https://{custom_host}/{stac_root_path.rstrip('/')}/{collections_endpoint}" + print(f"Checking links for custom host stac-api {url=}") + response = requests.get(url) + assert response.status_code == 200 + + # Walk check root path propagation through dynamic links when using custom host + collections = response.json().get("collections") + next_links_untested = True + + while next_links_untested: + for collection in collections: + + # All collections should have a dynamicaly generated items link, even if no items exist + items_link = _get_link(collection, "items") + assert items_link + items_url = items_link.get("href") + assert items_url + items_response = requests.get(items_url) + assert items_response.status_code == 200 + items_json = items_response.json() + features = items_json.get("features") + + # The default page size is 10 + if len(features) >= 10: + items_next_link = _get_link(items_json, "next") + assert items_next_link + next_url = items_next_link.get("href") + assert next_url + 
next_response = requests.get(next_url) + assert next_response.status_code == 200 + next_links_untested = False + break diff --git a/integration_test/test_stac_pagination_next_link.py b/integration_test/test_stac_pagination_next_link.py new file mode 100644 index 0000000..ed504f0 --- /dev/null +++ b/integration_test/test_stac_pagination_next_link.py @@ -0,0 +1,94 @@ +import requests +from dotenv import load_dotenv +import os +import pytest + +load_dotenv() + +def _get_link(obj: dict, rel: str) -> str: + """ + Helper function to find a specific link by its 'rel' type in a STAC object's "links" array. + """ + if not obj or not obj.get("links"): + return None + return next((link for link in obj.get("links") if link.get("rel") == rel), None) + +def test_collection_pagination_next_link_is_valid(): + """ + Validates that for collections with enough items to require pagination, + the 'next' link provided in the item list is a valid URL that returns a 200 OK status + and does not contain a CloudFront cache error. + + This test fetches all collections and then individually checks their item lists for pagination links. + It focuses on the custom host configuration as it's the primary way + dynamically generated links are used by end-users. + """ + custom_host = os.getenv("VEDA_CUSTOM_HOST") + stac_root_path = os.getenv("VEDA_STAC_ROOT_PATH", "") + + if not custom_host: + pytest.skip("VEDA_CUSTOM_HOST environment variable not set. Skipping pagination test.") + + # Construct the URL to the main collections endpoint + collections_url = f"https://{custom_host}/{stac_root_path.strip('/')}/collections" + print(f"Starting pagination test. 
Fetching collections from: {collections_url}") + + # Fetch all collections + try: + collections_response = requests.get(collections_url) + collections_response.raise_for_status() # Fail fast if collections endpoint is down + collections = collections_response.json().get("collections", []) + except (requests.exceptions.RequestException, ValueError) as e: + pytest.fail(f"Could not fetch or parse collections from {collections_url}. Error: {e}") + + # Iterate through each collection to find one with pagination + for collection_summary in collections: + collection_id = collection_summary.get("id") + if not collection_id: + print("Skipping a collection that is missing an 'id'.") + continue + + # Get the link to the collection's items. The 'items' link is within the collection's own links. + items_link = _get_link(collection_summary, "items") + if not (items_link and items_link.get("href")): + print(f"Skipping collection '{collection_id}': No 'items' link found.") + continue + + items_url = items_link["href"] + print(f"Checking items for collection '{collection_id}' at: {items_url}") + + # Fetch the first page of items for the collection + try: + items_response = requests.get(items_url) + if items_response.status_code != 200: + print(f"Warning: Could not fetch items for '{collection_id}'. Status: {items_response.status_code}") + continue + items_json = items_response.json() + except (requests.exceptions.RequestException, ValueError) as e: + print(f"Warning: Could not fetch or parse items for '{collection_id}'. 
Error: {e}") + continue + + # Check if a 'next' link exists on the first page of items + next_link = _get_link(items_json, "next") + if next_link and next_link.get("href"): + next_url = next_link["href"] + print(f" - Found 'next' link: {next_url}") + + # Make a GET request to the 'next' URL + try: + next_page_response = requests.get(next_url) + + # Assert that the link is valid and returns a 200 OK status + assert next_page_response.status_code == 200, f"'next' link for {collection_id} failed with status {next_page_response.status_code}" + print(f" - Success! 'next' link returned status {next_page_response.status_code}.") + + x_cache_header = next_page_response.headers.get("x-cache", "").lower() + assert "error from cloudfront" not in x_cache_header, f"CloudFront error detected in x-cache header for {collection_id}: {x_cache_header}" + print(f" - Success! No CloudFront error found in x-cache header.") + + # Once we've successfully tested one 'next' link, we can exit to keep the test fast. + break + except requests.exceptions.RequestException as e: + pytest.fail(f"Request for 'next' link URL {next_url} failed. 
Error: {e}") + else: + print(f" - No 'next' link found for '{collection_id}' (collection may have less than one page of items).") \ No newline at end of file diff --git a/reuirements.txt b/requirements.txt similarity index 100% rename from reuirements.txt rename to requirements.txt diff --git a/s3-disaster-recovery b/s3-disaster-recovery new file mode 160000 index 0000000..1488ec0 --- /dev/null +++ b/s3-disaster-recovery @@ -0,0 +1 @@ +Subproject commit 1488ec04ab924968d6c677a3d73fc52703c5abc6 diff --git a/scripts/generate_env_file.py b/scripts/generate_env_file.py index 75536e4..4fb86aa 100644 --- a/scripts/generate_env_file.py +++ b/scripts/generate_env_file.py @@ -51,18 +51,32 @@ def generate_env_file(secret_id, stack_names=None, out_file=".env"): parser.add_argument( "--stack-names", dest="stack_names", - help="Cloudformation Stack names (comma separated)", + help="Cloudformation Stack names (comma separated). If the flag is used without a value, or if the flag is omitted entirely, it defaults to None.", + required=False, default=None, + nargs='?', + const=None, + ) + parser.add_argument( + "--env-file", + dest="env_file", + help=".env file to write to", + required=False, + default=".env", ) args = parser.parse_args() - secret_id, stack_names = ( + secret_id, stack_names, env_file = ( args.secret_id, - args.stack_names + args.stack_names, + args.env_file ) + + + generate_env_file( stack_names=stack_names, secret_id=secret_id, - out_file=".env" + out_file=env_file ) diff --git a/scripts/update_deployment_status.py b/scripts/update_deployment_status.py new file mode 100644 index 0000000..f76272b --- /dev/null +++ b/scripts/update_deployment_status.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +import argparse, os, re +from collections import defaultdict, OrderedDict +from datetime import datetime, timezone + + +DEPLOYMENT_FILE = "deployment-status.md" + +# component_id, DEPLOY_* env, VEDA_*_GIT_REF env, display name in MD, repo URL +COMPONENTS = [ + ("auth", 
"DEPLOY_AUTH", "VEDA_AUTH_GIT_REF", "AUTH", "https://github.com/NASA-IMPACT/veda-auth"), + ("backend", "DEPLOY_BACKEND", "VEDA_BACKEND_GIT_REF", "BACKEND", "https://github.com/NASA-IMPACT/veda-backend"), + ("features_api", "DEPLOY_FEATURES_API", "VEDA_FEATURES_API_GIT_REF", "FEATURES_API", "https://github.com/NASA-IMPACT/veda-features-api-cdk"), + ("routes", "DEPLOY_ROUTES", "VEDA_ROUTES_GIT_REF", "ROUTES", "https://github.com/NASA-IMPACT/veda-routes"), + ("sm2a", "DEPLOY_SM2A", "VEDA_SM2A_DATA_AIRFLOW_GIT_REF", "SM2A", "https://github.com/NASA-IMPACT/veda-sm2a"), + ("monitoring", "DEPLOY_MONITORING", "VEDA_MONITORING_GIT_REF", "MONITORING", "https://github.com/NASA-IMPACT/veda-monitoring"), + ("titiler_multidim", "DEPLOY_TITILER_MULTIDIM", "VEDA_TITILER_MULTIDIM_GIT_REF", "TITILER_MULTIDIM", "https://github.com/developmentseed/titiler-multidim"), + ("s3_dr", "DEPLOY_S3_DISASTER_RECOVERY", "VEDA_S3_DISASTER_RECOVERY_GIT_REF", "S3_DR", "https://github.com/NASA-IMPACT/s3-disaster-recovery"), + ("titiler_cmr", "DEPLOY_TITILER_CMR", "VEDA_TITILER_CMR_GIT_REF", "TITILER_CMR", "https://github.com/developmentseed/titiler-cmr"), +] + +ROW_RE = re.compile(r'^\|\s*([^|]+?)\s*\|\s*([^|]+?)\s*\|\s*([^|]+?)\s*\|$') +ENV_RE = re.compile(r'^##\s+(.+)\s*$') + +def load_state(path: str) -> dict: + """Parse existing markdown into { env: { DISPLAY_NAME: ref } }.""" + state = defaultdict(dict) + if not os.path.exists(path): + return state + + current_env = None + with open(path, "r", encoding="utf-8") as f: + for raw in f: + line = raw.rstrip("\n") + + m_env = ENV_RE.match(line) + if m_env: + current_env = m_env.group(1).strip() + continue + + if not current_env or not line.startswith("|"): + continue + + m_row = ROW_RE.match(line) + if not m_row: + continue + + c1 = m_row.group(1).strip() + c2 = m_row.group(2).strip() + c3 = m_row.group(3).strip() + + if c1.lower() == "component": + continue + if set(c1) == {"-"} and set(c2) == {"-"}: + continue + + state[current_env][c1] = (c2, c3) 
+ return state + +def make_ref_link(repo_url: str, ref: str) -> str: + if not ref: + return "" + return f"[{ref}]({repo_url}/tree/{ref})" + +def write_state(path: str, state: dict): + ordered_envs = sorted(state.keys()) + with open(path, "w", encoding="utf-8") as f: + f.write("# Deployment Status\n\nIf a component or environment is not listed, it has not yet been deployed through veda-deploy.\n\n") + for env in ordered_envs: + f.write(f"## {env}\n") + f.write("| Component | Git Ref | Updated (UTC) |\n") + f.write("|-----------|---------|---------------|\n") + for _, _, _, disp, repo_url in COMPONENTS: + entry = state[env].get(disp, ("","")) + print(f"Processing {disp} for {env}: {entry}") + ref, upd = entry + ref_link = ref if ref.startswith("[") else make_ref_link(repo_url, ref) + f.write(f"| {disp} | {ref_link} | {upd} |\n") + f.write("\n") + +def collect_updates_from_env() -> dict: + """Return { DISPLAY_NAME: ref } for components that should be updated.""" + updates = {} + ts = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S') + for _, deploy_env, ref_env, display, _ in COMPONENTS: + print(f"Checking {display} for environment variables: {deploy_env}, {ref_env}") + deploy_flag = os.getenv(deploy_env, "") + ref_val = os.getenv(ref_env, "") + print(f" {deploy_env} = {deploy_flag}, {ref_env} = {ref_val}") + if deploy_flag == "true" and ref_val.strip(): # this is weird but GHA won't let the flag be cast as a boolean + updates[display] = (ref_val.strip(), ts) + return updates + +def main(): + print("Updating deployment status...") + ap = argparse.ArgumentParser(description="Update deployment-status.md with current refs.") + ap.add_argument("--env", required=True, help="Target environment name (must match your GH environment)") + ap.add_argument("--file", default=DEPLOYMENT_FILE, help="Path to deployment status markdown") + args = ap.parse_args() + + state = load_state(args.file) + print(f"Loaded existing state for {args.env}: {state.get(args.env, {})}") + # 
Ensure env exists in state even if empty + _ = state[args.env] + + updates = collect_updates_from_env() + print(f"Collected updates: {updates}") + if not updates: + # Nothing to do; write existing state back unchanged (no-op) + write_state(args.file, state) + return + + # Apply updates for selected components only + for display, ref in updates.items(): + state[args.env][display] = ref + + write_state(args.file, state) + +if __name__ == "__main__": + print("Starting deployment status update script...") + main() \ No newline at end of file diff --git a/scripts/update_secret_with_inputs.py b/scripts/update_secret_with_inputs.py new file mode 100644 index 0000000..abd7f72 --- /dev/null +++ b/scripts/update_secret_with_inputs.py @@ -0,0 +1,61 @@ +import argparse +import json +import re + +import boto3 + +_ARG_REGEX = r'--(.*)=(.*)' + + +def update_secret_with_inputs(): + parser = argparse.ArgumentParser( + description=""" + **WARNING** This is destructive, if an input is provided that's name already exists in + the AWS Secret, it will be overridden. + + --- + + Takes in N inputs in the form --input-name=value and inserts them into the provided + AWS SecretsManager secret. 
If a prefix is provided, it will be prepended to the input name + + This assumes that the SecretString value is a stringified JSON object + + For example, with no prefix, an input of --my-secret-item=hello will be inserted as: + + MY_SECRET_ITEM=hello + + Whereas with a prefix of MY_PREFIX, an input of --my-secret-item=hello will be inserted as: + + MY_PREFIX_MY_SECRET_ITEM=hello + """, + formatter_class=argparse.RawTextHelpFormatter + ) + + parser.add_argument("--prefix", default="", required=False, help="Optional prefix to prepend to input names") + parser.add_argument("--secret-id", required=True, help="The ARN or Name of the AWS SecretsManager secret to update") + + known_args, unknown_args = parser.parse_known_args() + + if not (secret_id := known_args.secret_id): + raise Exception("An AWS SecretsManager secret id is required") + + values_to_add_to_secret = {} + + for arg in unknown_args: + if match := re.match(_ARG_REGEX, arg): + secret_entry_name = known_args.prefix.upper() + match.group(1).upper().replace("-", '_') + secret_entry_value = match.group(2) + values_to_add_to_secret[secret_entry_name] = secret_entry_value + + secrets_manager_client = boto3.client("secretsmanager") + secret = secrets_manager_client.get_secret_value(SecretId=secret_id) + secret_value = json.loads(secret["SecretString"]) + + for k, v in values_to_add_to_secret.items(): + secret_value[k] = v + + secrets_manager_client.put_secret_value(SecretId=secret_id, SecretString=json.dumps(secret_value)) + + +if __name__ == "__main__": + update_secret_with_inputs() diff --git a/titiler-multidim b/titiler-multidim new file mode 160000 index 0000000..0e0181b --- /dev/null +++ b/titiler-multidim @@ -0,0 +1 @@ +Subproject commit 0e0181b13f985a9fcd5f4c6c0681342ae52d5369 diff --git a/veda-auth b/veda-auth new file mode 160000 index 0000000..ff1bb97 --- /dev/null +++ b/veda-auth @@ -0,0 +1 @@ +Subproject commit ff1bb97dac80adac112d2264c34f7d9eead76729 diff --git a/veda-backend b/veda-backend new file 
mode 160000 index 0000000..0e2bc31 --- /dev/null +++ b/veda-backend @@ -0,0 +1 @@ +Subproject commit 0e2bc3105ba188bf89d5ef274b89dc44b1a20612 diff --git a/veda-data-airflow b/veda-data-airflow new file mode 160000 index 0000000..3d0ab13 --- /dev/null +++ b/veda-data-airflow @@ -0,0 +1 @@ +Subproject commit 3d0ab1363903bfc6cb27a0421dfa7047c636568f diff --git a/veda-features-api-cdk b/veda-features-api-cdk new file mode 160000 index 0000000..8e32f0f --- /dev/null +++ b/veda-features-api-cdk @@ -0,0 +1 @@ +Subproject commit 8e32f0faecddb8d4d00e6b9b767132f5e8d286d3 diff --git a/veda-monitoring b/veda-monitoring new file mode 160000 index 0000000..ca38129 --- /dev/null +++ b/veda-monitoring @@ -0,0 +1 @@ +Subproject commit ca381298927ed9dee3b985b01ac2446aa289eedb diff --git a/veda-routes b/veda-routes new file mode 160000 index 0000000..8ac256a --- /dev/null +++ b/veda-routes @@ -0,0 +1 @@ +Subproject commit 8ac256af750d0d0011d9c9a86349bba9b5efa8a8