forked from polleyg/gcp-batch-ingestion-bigquery
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: cloudbuild.yaml
More file actions
44 lines (37 loc) · 1.36 KB
/
cloudbuild.yaml
File metadata and controls
44 lines (37 loc) · 1.36 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
---
# Cloud Build pipeline: provision GCP infrastructure with Terraform, build and
# run the Dataflow job, deploy the bucket-triggered Cloud Function, then drop
# a demo file into the bucket to exercise the whole flow end to end.
steps:
  # 1. Fetch the source code.
  # NOTE(review): Cloud Build usually provides the source in /workspace already;
  # the later `dir:` values point at the workspace root, not the cloned folder —
  # confirm this clone is actually what the Terraform/gradle steps consume.
  - name: gcr.io/cloud-builders/git
    args:
      - clone
      - 'https://github.com/polleyg/gcp-batch-ingestion-bigquery.git'

  # 2a. Initialise Terraform using the public hashicorp/terraform image.
  - name: hashicorp/terraform
    dir: terraform
    args:
      - init

  # 2b. Provision the GCS bucket (and related resources) with Terraform.
  - name: hashicorp/terraform
    id: terraform-apply
    dir: terraform
    args:
      - apply
      - '-auto-approve'

  # 3. Build and run the Dataflow pipeline (staged template) once the
  #    infrastructure exists.
  - name: gcr.io/cloud-builders/gradle
    waitFor: ['terraform-apply']
    args: ['build', 'run']

  # 4a. Install the Cloud Function's npm dependencies and run its tests.
  - name: gcr.io/cloud-builders/npm
    id: npm-install-test
    dir: cloud-function
    waitFor: ['terraform-apply']
    args: ['install-test']

  # 4b. Deploy the Cloud Function that listens to the upload bucket.
  - name: gcr.io/cloud-builders/gcloud
    id: function-deploy
    dir: cloud-function
    waitFor: ['npm-install-test']
    args:
      - functions
      - deploy
      - goWithTheDataFlow
      - '--stage-bucket=gs://batch-pipeline'
      - '--trigger-bucket=gs://batch-pipeline'

  # 5. Copy a test file into the bucket to trigger the pipeline (demo only).
  # NOTE(review): the source uses a `*` wildcard but the destination is a
  # single object name — gsutil errors if the wildcard matches multiple files;
  # verify the source bucket holds exactly one object.
  - name: gcr.io/cloud-builders/gsutil
    args:
      - cp
      - 'gs://test-file-for-dataflow/*'
      - 'gs://batch-pipeline/upload/file.csv'

# 6. Archive the Gradle distribution tarballs to GCS for later use.
artifacts:
  objects:
    location: 'gs://batch-pipeline/artifacts'
    paths: ['build/distributions/*.*']