
Commit da4033b

feat: Added cloud sql restore module that uses database import (#343)
adding cloud sql restore module that uses database import
1 parent 95e48a1 commit da4033b

6 files changed  +423 −0 lines changed

modules/restore/README.md

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
# GCP CloudSQL Restore

## Import from GCS Export Dump

This module can be used for [importing a Cloud SQL Postgres database](https://cloud.google.com/sql/docs/postgres/import-export/import-export-sql) from a SQL export dump stored in a GCS bucket.

This module uses the SQL export dump file timestamp, passed as an input parameter to the Workflow, to locate the exported dumps in GCS. The import workflow performs the following steps (a sketch of wiring up the module follows this list):

1. Fetch the list of databases from the source database instance (the one the export was created for)
2. Delete those databases, except the system databases (`postgres` for Postgres and `tempdb` for SQL Server), in the database instance that the data will be imported into
3. Create the databases (from the list in step 1), except the system databases, in the import database instance
4. Fetch the SQL export file(s) from GCS and import them into the import database instance
5. Because the import API call is asynchronous, the workflow checks the status of the import at regular intervals and waits until it finishes

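A minimal sketch of instantiating this module from a root configuration (the source path, project ID, instance name, and bucket URI below are placeholders, not values taken from this change):

```
module "sql_restore" {
  source = "./modules/restore" # assumed path; adjust to where this module lives

  project_id   = "my-project-id"      # placeholder
  region       = "us-central1"
  sql_instance = "my-target-instance" # instance the dumps are imported into (placeholder)

  # The workflow derives the bucket name and the exported instance name from this URI.
  import_uri = "gs://my-export-bucket/my-source-instance" # placeholder

  # Leave empty to import every database found on the source instance.
  import_databases = []
}
```
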
## How to run

```
gcloud workflows run [WORKFLOW_NAME] --data='{"exportTimestamp":"[EXPORT_TIMESTAMP]"}'
```

where `WORKFLOW_NAME` is the name of your import workflow and `exportTimestamp` is the timestamp of your export file(s) (you can get it from the GCS object key of the export file). For example:

```
gcloud workflows run my-import-workflow --data='{"exportTimestamp": "1658779617"}'
```

## Required APIs

- `workflows.googleapis.com`
- `cloudscheduler.googleapis.com`

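One way to enable these APIs from Terraform (a sketch; the project ID is a placeholder):

```
resource "google_project_service" "workflows" {
  project = "my-project-id" # placeholder
  service = "workflows.googleapis.com"
}

resource "google_project_service" "cloudscheduler" {
  project = "my-project-id" # placeholder
  service = "cloudscheduler.googleapis.com"
}
```
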
<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
## Inputs

| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| import\_databases | The list of databases that should be imported. If this is an empty set, all databases will be imported | `set(string)` | `[]` | no |
| import\_uri | The bucket and path URI of the GCS backup file to import | `string` | n/a | yes |
| project\_id | The project ID | `string` | n/a | yes |
| region | The region in which to run the workflow | `string` | `"us-central1"` | no |
| service\_account | The service account to use for running the workflow and for triggering the workflow via Cloud Scheduler. If empty or null, a service account will be created. If you provide a service account, you must grant it the Cloud SQL Admin and Workflows Invoker roles | `string` | `null` | no |
| sql\_instance | The name of the SQL instance to import into | `string` | n/a | yes |

## Outputs

| Name | Description |
|------|-------------|
| import\_workflow\_name | The name of the import workflow |
| region | n/a |
| service\_account | The service account email running the scheduler and workflow |

<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

modules/restore/main.tf

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
1+
/**
2+
* Copyright 2022 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
18+
locals {
19+
create_service_account = var.service_account == null || var.service_account == "" ? true : false
20+
service_account = local.create_service_account ? google_service_account.sql_import_serviceaccount[0].email : var.service_account
21+
}
22+
23+
24+
################################
25+
# #
26+
# Service Account and IAM #
27+
# #
28+
################################
29+
resource "google_service_account" "sql_import_serviceaccount" {
30+
count = local.create_service_account ? 1 : 0
31+
account_id = trimsuffix(substr("import-${var.sql_instance}", 0, 28), "-")
32+
display_name = "Managed by Terraform - Service account for import of SQL Instance ${var.sql_instance}"
33+
project = var.project_id
34+
}
35+
36+
resource "google_project_iam_member" "sql_import_serviceaccount_sql_admin" {
37+
count = local.create_service_account ? 1 : 0
38+
member = "serviceAccount:${google_service_account.sql_import_serviceaccount[0].email}"
39+
role = "roles/cloudsql.admin"
40+
project = var.project_id
41+
}
42+
43+
resource "google_project_iam_member" "sql_import_serviceaccount_workflow_invoker" {
44+
count = local.create_service_account ? 1 : 0
45+
member = "serviceAccount:${google_service_account.sql_import_serviceaccount[0].email}"
46+
role = "roles/workflows.invoker"
47+
project = var.project_id
48+
}
49+
50+
data "google_sql_database_instance" "import_instance" {
51+
name = var.sql_instance
52+
project = var.project_id
53+
}
54+
55+
################################
56+
# #
57+
# Import Workflow #
58+
# #
59+
################################
60+
resource "google_workflows_workflow" "sql_import" {
61+
name = "sql-import-${var.sql_instance}"
62+
region = var.region
63+
description = "Workflow for importing the CloudSQL Instance database using an external import"
64+
project = var.project_id
65+
service_account = local.service_account
66+
source_contents = templatefile("${path.module}/templates/import.yaml.tftpl", {
67+
project = var.project_id
68+
instanceName = var.sql_instance
69+
databases = jsonencode(var.import_databases)
70+
gcsBucket = var.import_uri
71+
exportedInstance = split("/", var.import_uri)[3]
72+
dbType = split("_", data.google_sql_database_instance.import_instance.database_version)[0]
73+
})
74+
}
75+
76+
resource "google_storage_bucket_iam_member" "sql_instance_account" {
77+
bucket = split("/", var.import_uri)[2] #Get the name of the bucket out of the URI
78+
member = "serviceAccount:${data.google_sql_database_instance.import_instance.service_account_email_address}"
79+
role = "roles/storage.objectViewer"
80+
}
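
For reference, a sketch of how `import_uri` is expected to be shaped, given the `split` indexing above (the URI is a placeholder, not a value from this change):

```
locals {
  # Placeholder URI of the form gs://<bucket>/<exported-instance>
  example_import_uri = "gs://my-export-bucket/my-source-instance"

  # split("/", ...) yields ["gs:", "", "my-export-bucket", "my-source-instance"],
  # so index 2 is the bucket name and index 3 is the exported instance name.
  example_bucket            = split("/", local.example_import_uri)[2] # "my-export-bucket"
  example_exported_instance = split("/", local.example_import_uri)[3] # "my-source-instance"
}
```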

modules/restore/outputs.tf

Lines changed: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
/**
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

output "import_workflow_name" {
  value       = google_workflows_workflow.sql_import.name
  description = "The name of the import workflow"
}

output "service_account" {
  value       = local.service_account
  description = "The service account email running the scheduler and workflow"
}

output "region" {
  value = var.region
}
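
A sketch of how a calling configuration might surface these outputs (the module label `sql_restore` is an assumption):

```
output "restore_workflow_name" {
  description = "Name of the import workflow, usable with `gcloud workflows run`"
  value       = module.sql_restore.import_workflow_name
}

output "restore_service_account" {
  description = "Service account email used by the restore workflow"
  value       = module.sql_restore.service_account
}
```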
modules/restore/templates/import.yaml.tftpl

Lines changed: 181 additions & 0 deletions
@@ -0,0 +1,181 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

main:
  params: [args]
  steps:
    - collectInfos:
        assign:
          - databases: ${databases}
%{ if databases == "[]" }
    - list of databases:
        call: googleapis.sqladmin.v1.databases.list
        args:
          project: ${project}
          instance: ${exportedInstance}
        result: dbListResult
    - collect DB list:
        for:
          value: db
          in: $${dbListResult.items}
          steps:
            - iterate:
                assign:
                  - databases: $${list.concat(databases, db.name)}
%{ endif }

%{ if dbType == "POSTGRES" }
    - import databases:
        for:
          value: database
          in: $${databases}
          steps:
            - exclude postgres DB:
                switch:
                  - condition: $${database != "postgres"}
                    steps:
                      - delete database:
                          call: googleapis.sqladmin.v1.databases.delete
                          args:
                            database: $${database}
                            instance: ${instanceName}
                            project: ${project}
                      - create database:
                          call: googleapis.sqladmin.v1.databases.insert
                          args:
                            instance: ${instanceName}
                            project: ${project}
                            body:
                              name: $${database}
                      - import database:
                          call: http.post
                          args:
                            url: $${"https://sqladmin.googleapis.com/v1/projects/" + "${project}" + "/instances/" + "${instanceName}" + "/import"}
                            auth:
                              type: OAuth2
                            body:
                              importContext:
                                uri: $${"${gcsBucket}/${exportedInstance}-" + database + "-" + args.exportTimestamp + ".sql"}
                                database: $${database}
                                fileType: SQL
                          result: importstatus
                      - checkstatus:
                          switch:
                            - condition: $${importstatus.body.status != "DONE"}
                              next: wait
                          next: continue
                      - wait:
                          call: sys.sleep
                          args:
                            seconds: 1
                          next: getstatus
                      - getstatus:
                          call: http.get
                          args:
                            url: $${importstatus.body.selfLink}
                            auth:
                              type: OAuth2
                          result: importstatus
                          next: checkstatus
%{ endif }

%{ if dbType == "SQLSERVER" }
    - import databases:
        for:
          value: database
          in: $${databases}
          steps:
            - exclude System DB:
                switch:
                  - condition: $${database != "tempdb"}
                    steps:
                      - delete database:
                          call: googleapis.sqladmin.v1.databases.delete
                          args:
                            database: $${database}
                            instance: ${instanceName}
                            project: ${project}
                      - create database:
                          call: googleapis.sqladmin.v1.databases.insert
                          args:
                            instance: ${instanceName}
                            project: ${project}
                            body:
                              name: $${database}
                      - import database:
                          call: http.post
                          args:
                            url: $${"https://sqladmin.googleapis.com/v1/projects/" + "${project}" + "/instances/" + "${instanceName}" + "/import"}
                            auth:
                              type: OAuth2
                            body:
                              importContext:
                                uri: $${"${gcsBucket}/${exportedInstance}-" + database + "-" + args.exportTimestamp + ".bak"}
                                database: $${database}
                                fileType: BAK
                          result: importstatus
                      - checkstatus:
                          switch:
                            - condition: $${importstatus.body.status != "DONE"}
                              next: wait
                          next: continue
                      - wait:
                          call: sys.sleep
                          args:
                            seconds: 1
                          next: getstatus
                      - getstatus:
                          call: http.get
                          args:
                            url: $${importstatus.body.selfLink}
                            auth:
                              type: OAuth2
                          result: importstatus
                          next: checkstatus
%{ endif }

%{ if dbType == "MYSQL" }
    - import database:
        call: http.post
        args:
          url: $${"https://sqladmin.googleapis.com/v1/projects/" + "${project}" + "/instances/" + "${instanceName}" + "/import"}
          auth:
            type: OAuth2
          body:
            importContext:
              uri: $${"${gcsBucket}/${exportedInstance}-" + args.exportTimestamp + ".sql"}
              fileType: SQL
        result: importstatus
    - checkstatus:
        switch:
          - condition: $${importstatus.body.status != "DONE"}
            next: wait
        next: completed
    - wait:
        call: sys.sleep
        args:
          seconds: 1
        next: getstatus
    - getstatus:
        call: http.get
        args:
          url: $${importstatus.body.selfLink}
          auth:
            type: OAuth2
        result: importstatus
        next: checkstatus
    - completed:
        return: "Done"
%{ endif }
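
For context, a sketch of how this template is rendered (mirroring the `templatefile` call in `main.tf`; the concrete values below are placeholders): `${...}` references are filled in by Terraform at render time, `$${...}` is escaped so the rendered YAML keeps a literal `${...}` for the Workflows runtime to evaluate, and the `%{ if dbType == ... }` directives keep only the branch matching the instance's database engine.

```
locals {
  # Placeholder values for illustration only; main.tf derives the real ones
  # from var.import_uri and the instance's database_version.
  rendered_import_workflow = templatefile("${path.module}/templates/import.yaml.tftpl", {
    project          = "my-project-id"
    instanceName     = "my-target-instance"
    databases        = jsonencode([])                            # "[]" enables the database-discovery steps
    gcsBucket        = "gs://my-export-bucket/my-source-instance" # main.tf passes var.import_uri here
    exportedInstance = "my-source-instance"
    dbType           = "POSTGRES"
  })
}
```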
