Deployment #97
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Manually-triggered deployment workflow: builds and packages the project,
# deploys a test stack to AWS with CDK, runs health checks against the
# deployed APIs, then tears the stack down (see the later steps).
name: Deployment

on:
  # Manual trigger only — no push/PR automation.
  workflow_dispatch:

jobs:
  build_package_and_deploy:
    name: Build, package and deploy
    runs-on: ubuntu-latest
    # Generous timeout: deploy + 5-minute warm-up sleep + checks + teardown.
    timeout-minutes: 90
    env:
      # AWS credentials/region/account for the deployment target,
      # supplied via repository secrets.
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION_DEPLOY }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_DEPLOY }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_DEPLOY }}
      AWS_DEFAULT_ACCOUNT: ${{ secrets.AWS_ACCOUNT_ID }}
    steps:
      - uses: actions/checkout@v6
      # Node 22 with npm caching for the JS build/packaging steps below.
      - uses: actions/setup-node@v6
        with:
          node-version: 22
          cache: "npm"
      - name: Install All Dependencies
        run: npm run install:all
      - name: Compile project
        run: npm run build
      - name: Generate distribution packages
        run: npm run package
      # uv manages the Python environment used for CDK deployment below.
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          # Pin to the 0.7 series to avoid surprise behavior changes.
          version: "0.7.*"
          enable-cache: true
      - name: Install deployment environment
        id: install_deploy_env
        run: |
          # install deployment environment with eoapi-cdk from build
          uv sync --group deploy
          # Install the sdist produced by `npm run package` into the uv env.
          uv pip install dist/python/*.gz
          # Install the CDK app's own node dependencies.
          cd integration_tests/cdk
          npm install
          cd -
      # use short commit SHA to name stacks
      - uses: benjlevesque/short-sha@v3.0
        id: short-sha
        with:
          length: 6
      - name: Deploy test stack
        id: deploy_step
        env:
          # Short commit SHA is used to name the stacks (see short-sha step),
          # so runs from different commits don't collide.
          PROJECT_ID: ${{ steps.short-sha.outputs.sha }}
        run: |
          # synthesize the stack
          cd integration_tests/cdk
          uv run npx cdk synth --debug --all --require-approval never
          # deploy the stack
          uv run npx cdk deploy --ci --all --require-approval never
          cd -
| - name: Get stack outputs and run operational checks | |
| id: operational_checks | |
| env: | |
| PROJECT_ID: ${{ steps.short-sha.outputs.sha }} | |
| run: | | |
| cd integration_tests/cdk | |
| echo "=== Retrieving Stack Outputs ===" | |
| # Get list of deployed stacks | |
| DEPLOYED_STACKS=$(uv run npx cdk list) | |
| echo "Deployed stacks: $DEPLOYED_STACKS" | |
| # Create outputs file | |
| echo "{}" > stack_outputs.json | |
| # Get outputs from each stack | |
| for STACK_NAME in $DEPLOYED_STACKS; do | |
| echo "Processing stack: $STACK_NAME" | |
| # Try to get outputs using AWS CLI | |
| STACK_OUTPUTS=$(aws cloudformation describe-stacks \ | |
| --stack-name "$STACK_NAME" \ | |
| --query 'Stacks[0].Outputs' \ | |
| --output json 2>/dev/null || echo "[]") | |
| if [ "$STACK_OUTPUTS" != "[]" ] && [ "$STACK_OUTPUTS" != "null" ]; then | |
| echo "Found outputs for $STACK_NAME:" | |
| echo "$STACK_OUTPUTS" | jq . | |
| # Convert to key-value format and merge with existing outputs | |
| TEMP_OUTPUTS=$(echo "$STACK_OUTPUTS" | jq -r 'if . then [.[] | select(.OutputKey and .OutputValue) | {(.OutputKey): .OutputValue}] | add // {} else {} end') | |
| echo "$TEMP_OUTPUTS" > temp_stack_outputs.json | |
| # Merge with existing outputs | |
| jq -s '.[0] * .[1]' stack_outputs.json temp_stack_outputs.json > merged.json | |
| mv merged.json stack_outputs.json | |
| else | |
| echo "No outputs found for $STACK_NAME" | |
| fi | |
| done | |
| echo "=== Final Combined Outputs ===" | |
| cat stack_outputs.json | jq . | |
| echo "=== Running Operational Checks ===" | |
| echo "Available CloudFormation output keys:" | |
| cat stack_outputs.json | jq -r 'keys[]' | |
| STAC_API_URL=$(cat stack_outputs.json | jq -r ' | |
| to_entries[] | | |
| select(.key | test("pgstacapistacapioutput"; "i")) | | |
| .value' | head -1) | |
| TITILER_PGSTAC_API_URL=$(cat stack_outputs.json | jq -r ' | |
| to_entries[] | | |
| select(.key | test("titilerpgstacapioutput"; "i")) | | |
| .value' | head -1) | |
| TIPG_API_URL=$(cat stack_outputs.json | jq -r ' | |
| to_entries[] | | |
| select(.key | test("tipgapioutput"; "i")) | | |
| .value' | head -1) | |
| echo "Extracted URLs:" | |
| echo "STAC_API_URL: $STAC_API_URL" | |
| echo "TITILER_PGSTAC_API_URL: $TITILER_PGSTAC_API_URL" | |
| echo "TIPG_API_URL: $TIPG_API_URL" | |
| # Array of API URLs to check | |
| declare -a API_HEALTH_ENDPOINTS=( | |
| "STAC_API_URL:${STAC_API_URL}_mgmt/health" | |
| "TITILER_PGSTAC_API_URL:${TITILER_PGSTAC_API_URL}healthz" | |
| "TIPG_API_URL:${TIPG_API_URL}healthz" | |
| ) | |
| # Check each API | |
| echo "=== Sleeping for 5 minutes to ensure everything is running ===" | |
| sleep 300 | |
| for API_INFO in "${API_HEALTH_ENDPOINTS[@]}"; do | |
| API_NAME=$(echo "$API_INFO" | cut -d: -f1) | |
| API_URL=$(echo "$API_INFO" | cut -d: -f2-) | |
| if [ -n "$API_URL" ] && [ "$API_URL" != "null" ]; then | |
| echo "Checking $API_NAME at: $API_URL" | |
| HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" --max-time 30 "$API_URL" || echo "000") | |
| if [ "$HTTP_STATUS" = "200" ]; then | |
| echo "✅ $API_NAME returned 200" | |
| else | |
| echo "❌ $API_NAME returned $HTTP_STATUS" | |
| exit 1 | |
| fi | |
| else | |
| echo "⚠️ $API_NAME URL not found in stack outputs" | |
| fi | |
| done | |
| echo "=== Operational Checks Complete ===" | |
| cd - | |
| - name: Tear down any infrastructure | |
| if: always() | |
| env: | |
| PROJECT_ID: ${{ steps.short-sha.outputs.sha }} | |
| run: | | |
| cd integration_tests/cdk | |
| # run this only if we find a 'cdk.out' directory, which means there might be things to tear down | |
| if [ -d "cdk.out" ]; then | |
| cd - | |
| cd integration_tests/cdk | |
| # see https://github.com/aws/aws-cdk/issues/24946 | |
| # this didn't work : rm -f cdk.out/synth.lock | |
| # so we just duplicate the cdk output to cdk-destroy.out | |
| uv run npx cdk destroy --output cdk-destroy.out --ci --all --force | |
| fi |