@@ -24,94 +24,130 @@ jobs:
       GCP_FEED_BASTION_SSH_KEY: ${{ secrets.GCP_FEED_BASTION_SSH_KEY }}
 
     steps:
-      - name: Authenticate to Google Cloud PROD project
-        id: gcloud_auth_prod
-        uses: google-github-actions/auth@v2
+      # - name: Authenticate to Google Cloud PROD project
+      #   id: gcloud_auth_prod
+      #   uses: google-github-actions/auth@v2
+      #   with:
+      #     credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
+      #
+      # - name: GCloud Setup PROD
+      #   uses: google-github-actions/setup-gcloud@v2
+      #
+      # - name: Get PROD SQL service account
+      #   run: |
+      #     SERVICE_ACCOUNT=$(gcloud sql instances describe "mobilitydata-database-instance" --project=$SOURCE_PROJECT_ID --format="value(serviceAccountEmailAddress)")
+      #     echo "SOURCE_SQL_SERVICE_ACCOUNT=$SERVICE_ACCOUNT" >> $GITHUB_ENV
+      #     echo "Source SQL Service Account: $SERVICE_ACCOUNT"
+      #
+      # - name: Authenticate to Google Cloud QA project
+      #   id: gcloud_auth_qa
+      #   uses: google-github-actions/auth@v2
+      #   with:
+      #     credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
+      #
+      # - name: GCloud Setup QA
+      #   uses: google-github-actions/setup-gcloud@v2
+      #
+      # - name: Create DB dump bucket and give permissions
+      #   run: |
+      #     BUCKET_PROJECT_ID=$DEST_PROJECT_ID
+      #
+      #     # Check if the bucket already exists
+      #     if ! gsutil ls -b "gs://${DUMP_BUCKET_NAME}" &> /dev/null; then
+      #       echo "Bucket doesn't exist. Creating..."
+      #       gsutil mb -l $GCP_REGION -p $BUCKET_PROJECT_ID "gs://${DUMP_BUCKET_NAME}"
+      #     else
+      #       echo "Bucket already exists."
+      #     fi
+      #
+      #     # Give the source SQL instance write access to the bucket
+      #     gsutil iam ch serviceAccount:$SOURCE_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
+      #
+      #     # Get the service account of the destination (QA) SQL instance
+      #     DEST_SQL_SERVICE_ACCOUNT=$(gcloud sql instances describe $DB_INSTANCE_NAME --format="value(serviceAccountEmailAddress)")
+      #     echo "Destination SQL Service Account: $DEST_SQL_SERVICE_ACCOUNT"
+      #
+      #     # Give the destination SQL instance read-write access to the bucket
+      #     gsutil iam ch serviceAccount:$DEST_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
+      #
+      # - name: Authenticate to Google Cloud PROD project again
+      #   uses: google-github-actions/auth@v2
+      #   with:
+      #     credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
+      #
+      # - name: GCloud Setup PROD again
+      #   uses: google-github-actions/setup-gcloud@v2
+      #
+      # - name: Dump the PROD DB
+      #   run: |
+      #     gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$SOURCE_DATABASE_NAME --quiet
+      #
+      # - name: Authenticate to Google Cloud QA project again
+      #   uses: google-github-actions/auth@v2
+      #   with:
+      #     credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
+      #
+      # - name: GCloud Setup QA again
+      #   uses: google-github-actions/setup-gcloud@v2
+      #
+      # - name: QA backup and import dump into the QA DB
+      #   run: |
+      #     # Dump the QA database as a backup.
+      #     # This is Google's recommended, safe method: the export runs inside GCP itself
+      #     # (so no traffic leaves GCP), avoids locking the database, and doesn't require
+      #     # direct access to the DB.
+      #     gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/qa-db-dump-backup.sql --database=$SOURCE_DATABASE_NAME --quiet
+      #
+      #     # Delete the existing database
+      #     gcloud sql databases delete $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME --quiet
+      #
+      #     # Create the new database
+      #     gcloud sql databases create $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME
+      #
+      #     # Import the dump into the QA database.
+      #     # The exported SQL contains statements that require authentication as user postgres.
+      #     # In theory we could dump the DB without these statements, with:
+      #     #   pg_dump --no-owner --no-privileges -d your_database > clean_dump.sql
+      #
+      #     export PGPASSWORD=$DEST_DATABASE_PASSWORD
+      #     gcloud sql import sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$DEST_DATABASE_NAME --user=$DEST_DATABASE_IMPORT_USER --quiet
+      #
+      # - name: Delete dump file from bucket
+      #   run: |
+      #     gsutil rm gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME
+
+      - name: Load secrets from 1Password
+        uses: 1password/load-secrets-action@v2
         with:
-          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
-
-      - name: GCloud Setup PROD 1
-        uses: google-github-actions/setup-gcloud@v2
-
-      - name: Get PROD SQL service account
-        run: |
-          SERVICE_ACCOUNT=$(gcloud sql instances describe "mobilitydata-database-instance" --project=$SOURCE_PROJECT_ID --format="value(serviceAccountEmailAddress)")
-          echo "SOURCE_SQL_SERVICE_ACCOUNT=$SERVICE_ACCOUNT" >> $GITHUB_ENV
-          echo "Destination SQL Service Account: $SERVICE_ACCOUNT"
-
-      - name: Authenticate to Google Cloud QA project
-        id: gcloud_auth_qa
-        uses: google-github-actions/auth@v2
-        with:
-          credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
-
-      - name: GCloud Setup 2
-        uses: google-github-actions/setup-gcloud@v2
-
-      - name: Create DB dump bucket and give permissions
+          export-env: true # Export loaded secrets as environment variables
+        env:
+          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
+          GCP_FEED_SSH_USER: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/GCP_FEED_SSH_USER/username"
+          GCP_FEED_BASTION_NAME: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/GCP_FEED_BASTION_NAME/username"
+          GCP_FEED_BASTION_SSH_KEY: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/GCP_FEED_BASTION_SSH_KEY/private key"
+
+      - name: Tunnel
         run: |
-          BUCKET_PROJECT_ID=$DEST_PROJECT_ID
-
-          # Check if the bucket already exists
-          if ! gsutil ls -b "gs://${DUMP_BUCKET_NAME}" &> /dev/null; then
-            echo "Bucket doesn't exist. Creating..."
-            gsutil mb -l $GCP_REGION -p $BUCKET_PROJECT_ID "gs://${DUMP_BUCKET_NAME}"
-          else
-            echo "Bucket already exists."
-          fi
-
-          # Give write permission for the source sql instance to write to the bucket
-          gsutil iam ch serviceAccount:$SOURCE_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
-
-          # Get the service account for the QA DB and give read permission to the bucket
-          DEST_SQL_SERVICE_ACCOUNT=$(gcloud sql instances describe $DB_INSTANCE_NAME --format="value(serviceAccountEmailAddress)")
-          echo "Destination SQL Service Account: $DEST_SQL_SERVICE_ACCOUNT"
-
-          # Give read-write permission on the bucket to the destination sql instance
-          gsutil iam ch serviceAccount:$DEST_SQL_SERVICE_ACCOUNT:objectAdmin gs://$DUMP_BUCKET_NAME
-
-      - name: Authenticate to Google Cloud PROD project Again
-        uses: google-github-actions/auth@v2
-        with:
-          credentials_json: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
-
-      - name: GCloud Setup PROD 2
-        uses: google-github-actions/setup-gcloud@v2
-
-      - name: Dump the PROD DB
-        run: |
-          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$SOURCE_DATABASE_NAME --quiet
-
-      - name: Authenticate to Google Cloud QA project Again Again
-        uses: google-github-actions/auth@v2
-        with:
-          credentials_json: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
-
-      - name: GCloud Setup 3
-        uses: google-github-actions/setup-gcloud@v2
-
-      - name: QA backup and import dump into the QA DB
-        run: |
-          # Dump the QA database as a backup
-          # According to chatgpt,
-          # This is Google's recommended, safe method and doesn’t require direct access to the DB. It runs the export
-          # in a way that avoids locking the database and works from GCP itself (so no traffic leaves GCP).
-          gcloud sql export sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/qa-db-dump-backup.sql --database=$SOURCE_DATABASE_NAME --quiet
-
-          # Delete the existing database
-          gcloud sql databases delete $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME --quiet
-
-          # Create a the new database
-          gcloud sql databases create $DEST_DATABASE_NAME --instance=$DB_INSTANCE_NAME
-
-          # Import the dump into the QA database
-          # The exported sql contains statements that require authentication as user postgres.
-          # In theory we could dump the DB without these statements, with:
-          # pg_dump --no-owner --no-privileges -d your_database > clean_dump.sql.
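+          # Write the bastion SSH key to disk, then tunnel local port 5454 to the QA database instance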
+          mkdir -p ~/.ssh
+          echo "${{ env.GCP_FEED_BASTION_SSH_KEY }}" > ~/.ssh/id_rsa
+          chmod 600 ~/.ssh/id_rsa
+          ./scripts/tunnel-create.sh -project_id $DEST_PROJECT_ID -zone ${GCP_REGION}-a -instance ${GCP_FEED_BASTION_NAME}-qa -target_account ${GCP_FEED_SSH_USER} -db_instance ${DB_INSTANCE_NAME} -port 5454
+          sleep 10 # Wait for the tunnel to establish
 
           export PGPASSWORD=$DEST_DATABASE_PASSWORD
-          gcloud sql import sql $DB_INSTANCE_NAME gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME --database=$DEST_DATABASE_NAME --user=$DEST_DATABASE_IMPORT_USER --quiet
-
-      - name: Delete dump file from bucket
-        run: |
-          gsutil rm gs://$DUMP_BUCKET_NAME/$DUMP_FILE_NAME
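+          # Change the owner of every table in the public schema to postgres.
+          # The heredoc delimiter is quoted so the shell does not expand the $$ markers.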
+          cat <<'EOF' | psql -h localhost -p 5454 -U data_feeds_user -d $DEST_DATABASE_NAME
+          DO $$
+          DECLARE
+            r RECORD;
+          BEGIN
+            FOR r IN
+              SELECT table_name
+              FROM information_schema.tables
+              WHERE table_schema = 'public'
+            LOOP
+              EXECUTE format('ALTER TABLE public.%I OWNER TO postgres;', r.table_name);
+            END LOOP;
+          END
+          $$;
+          EOF
+