@@ -26,97 +26,98 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
-#      - name: Authenticate to Google Cloud PROD project
-#        id: gcloud_auth_prod
-#        uses: google-github-actions/auth@v2
-#        with:
-#          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
-#
-#      - name: GCloud Setup PROD
-#        uses: google-github-actions/setup-gcloud@v2
-#
-#      - name: Get PROD SQL service account
-#        run: |
-#          SERVICE_ACCOUNT=$(gcloud sql instances describe "mobilitydata-database-instance" --project=$SOURCE_PROJECT_ID --format="value(serviceAccountEmailAddress)")
-#          echo "SOURCE_SQL_SERVICE_ACCOUNT=$SERVICE_ACCOUNT" >> $GITHUB_ENV
-#          echo "Destination SQL Service Account: $SERVICE_ACCOUNT"
-#
-#      - name: Authenticate to Google Cloud QA project
-#        id: gcloud_auth_qa
-#        uses: google-github-actions/auth@v2
-#        with:
-#          credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
-#
-#      - name: GCloud Setup QA
-#        uses: google-github-actions/setup-gcloud@v2
-#
-#      - name: Create DB dump bucket and give permissions
-#        run: |
-#          BUCKET_PROJECT_ID=$DEST_PROJECT_ID
-#
-#          # Check if the bucket already exists
-#          if ! gsutil ls -b "gs://${DUMP_BUCKET_NAME}" &> /dev/null; then
-#            echo "Bucket doesn't exist. Creating..."
-#            gsutil mb -l $GCP_REGION -p $BUCKET_PROJECT_ID "gs://${DUMP_BUCKET_NAME}"
-#          else
-#            echo "Bucket already exists."
-#          fi
-#
-#          # Give write permission for the source sql instance to write to the bucket
-#          gsutil iam ch serviceAccount:$SOURCE_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
-#
-#          # Get the service account for the QA DB and give read permission to the bucket
-#          DEST_SQL_SERVICE_ACCOUNT=$(gcloud sql instances describe $DB_INSTANCE_NAME --format="value(serviceAccountEmailAddress)")
-#          echo "Destination SQL Service Account: $DEST_SQL_SERVICE_ACCOUNT"
-#
-#          # Give read-write permission on the bucket to the destination sql instance
-#          gsutil iam ch serviceAccount:$DEST_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
-#
-#      - name: Authenticate to Google Cloud PROD project Again
-#        uses: google-github-actions/auth@v2
-#        with:
-#          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
-#
-#      - name: GCloud Setup PROD again
-#        uses: google-github-actions/setup-gcloud@v2
-#
-#      - name: Dump the PROD DB
-#        run: |
-#          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$SOURCE_DATABASE_NAME --quiet
-#
+
+      - name: Authenticate to Google Cloud PROD project
+        id: gcloud_auth_prod
+        uses: google-github-actions/auth@v2
+        with:
+          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
+
+      - name: GCloud Setup PROD
+        uses: google-github-actions/setup-gcloud@v2
+
+      - name: Get PROD SQL service account
+        run: |
+          SERVICE_ACCOUNT=$(gcloud sql instances describe "mobilitydata-database-instance" --project=$SOURCE_PROJECT_ID --format="value(serviceAccountEmailAddress)")
+          echo "SOURCE_SQL_SERVICE_ACCOUNT=$SERVICE_ACCOUNT" >> $GITHUB_ENV
+          echo "Source SQL Service Account: $SERVICE_ACCOUNT"
+
+      - name: Authenticate to Google Cloud QA project
+        id: gcloud_auth_qa
+        uses: google-github-actions/auth@v2
+        with:
+          credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
+
+      - name: GCloud Setup QA
+        uses: google-github-actions/setup-gcloud@v2
+
+      - name: Create DB dump bucket and give permissions
+        run: |
+          BUCKET_PROJECT_ID=$DEST_PROJECT_ID
+
+          # Check if the bucket already exists
+          if ! gsutil ls -b "gs://${DUMP_BUCKET_NAME}" &> /dev/null; then
+            echo "Bucket doesn't exist. Creating..."
+            gsutil mb -l $GCP_REGION -p $BUCKET_PROJECT_ID "gs://${DUMP_BUCKET_NAME}"
+          else
+            echo "Bucket already exists."
+          fi
+
+          # Give write permission for the source sql instance to write to the bucket
+          gsutil iam ch serviceAccount:$SOURCE_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
+
+          # Get the service account for the QA DB and give read permission to the bucket
+          DEST_SQL_SERVICE_ACCOUNT=$(gcloud sql instances describe $DB_INSTANCE_NAME --format="value(serviceAccountEmailAddress)")
+          echo "Destination SQL Service Account: $DEST_SQL_SERVICE_ACCOUNT"
+
+          # Give read-write permission on the bucket to the destination sql instance
+          gsutil iam ch serviceAccount:$DEST_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
+
+      - name: Authenticate to Google Cloud PROD project Again
+        uses: google-github-actions/auth@v2
+        with:
+          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
+
+      - name: GCloud Setup PROD again
+        uses: google-github-actions/setup-gcloud@v2
+
+      - name: Dump the PROD DB
+        run: |
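+          # Note: the export below is performed by the Cloud SQL instance itself, which writes the
+          # dump to the bucket using its own service account; this is why the bucket step above
+          # grants that account objectAdmin.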
+          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$SOURCE_DATABASE_NAME --quiet
+
       - name: Authenticate to Google Cloud QA project Again
         uses: google-github-actions/auth@v2
         with:
           credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
 
       - name: GCloud Setup QA Again
         uses: google-github-actions/setup-gcloud@v2
-#
-#      - name: QA backup and import dump into the QA DB
-#        run: |
-#          # Dump the QA database as a backup
-#          # According to chatgpt,
-#          # This is Google's recommended, safe method and doesn’t require direct access to the DB. It runs the export
-#          # in a way that avoids locking the database and works from GCP itself (so no traffic leaves GCP).
-#          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/qa-db-dump-backup.sql --database=$SOURCE_DATABASE_NAME --quiet
-#
-#          # Delete the existing database
-#          gcloud sql databases delete $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME --quiet
-#
-#          # Create a the new database
-#          gcloud sql databases create $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME
-#
-#          # Import the dump into the QA database
-#          # The exported sql contains statements that require authentication as user postgres.
-#          # In theory we could dump the DB without these statements, with:
-#          # pg_dump --no-owner --no-privileges -d your_database > clean_dump.sql.
-#
-#          export PGPASSWORD=$DEST_DATABASE_PASSWORD
-#          gcloud sql import sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$DEST_DATABASE_NAME --user=$DEST_DATABASE_IMPORT_USER --quiet
-#
-#      - name: Delete dump file from bucket
-#        run: |
-#          gsutil rm gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME
+
+      - name: QA backup and import dump into the QA DB
+        run: |
+          # Dump the QA database as a backup.
+          # According to ChatGPT, this is Google's recommended, safe method and doesn't require
+          # direct access to the DB. It runs the export in a way that avoids locking the database
+          # and works from GCP itself (so no traffic leaves GCP).
+          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/qa-db-dump-backup.sql --database=$SOURCE_DATABASE_NAME --quiet
+
+          # Delete the existing database
+          gcloud sql databases delete $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME --quiet
+
+          # Create the new database
+          gcloud sql databases create $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME
+
+          # Import the dump into the QA database.
+          # The exported SQL contains statements that require authentication as user postgres.
+          # In theory we could dump the DB without these statements, with:
+          #   pg_dump --no-owner --no-privileges -d your_database > clean_dump.sql
+
+          export PGPASSWORD=$DEST_DATABASE_PASSWORD
+          gcloud sql import sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$DEST_DATABASE_NAME --user=$DEST_DATABASE_IMPORT_USER --quiet
+
+      - name: Delete dump file from bucket
+        run: |
+          gsutil rm gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME
 
       - name: Load secrets from 1Password
@@ -154,7 +155,7 @@ jobs:
           $$;
           EOF
 
-          cat <<'EOF' | psql -h localhost -p 5454 -U data_feeds_user -d $DEST_DATABASE_NAME
+          cat <<'EOF' | psql -h localhost -p 5454 -U postgres -d $DEST_DATABASE_NAME
           UPDATE feed
           SET feed_contact_email = REPLACE(feed_contact_email, '@', '_') || '@mobilitydata.org'
           WHERE feed_contact_email IS NOT NULL
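
A possible follow-up, not part of this change: once the import and the anonymization above have run, the result can be spot-checked over the same Cloud SQL proxy connection the workflow already uses (localhost:5454, user postgres). The query below is only a sketch; it assumes PGPASSWORD is exported as in the import step and that the UPDATE rewrites every non-empty address.

    # Hypothetical spot check (not in the workflow): count addresses that were not anonymized.
    # Expected to print 0 after the UPDATE above.
    psql -h localhost -p 5454 -U postgres -d $DEST_DATABASE_NAME -t -c \
      "SELECT count(*) FROM feed
       WHERE feed_contact_email IS NOT NULL
         AND feed_contact_email <> ''
         AND feed_contact_email NOT LIKE '%@mobilitydata.org';"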