
Commit a65afd1

change two glue jobs' triggers (#2081)
* change the glue job triggers
* change the trigger to new crawler
1 parent 8a0759d

1 file changed (+2, -2 lines)

terraform/etl/38-aws-glue-job-parking.tf

Lines changed: 2 additions & 2 deletions
@@ -1087,7 +1087,7 @@ module "parking_defect_met_fail" {
   pydeequ_zip_key = data.aws_s3_object.pydeequ.key
   spark_ui_output_storage_id = module.spark_ui_output_storage_data_source.bucket_id
   script_name = "parking_defect_met_fail"
-  triggered_by_crawler = local.is_live_environment ? module.parking_spreadsheet_parking_ops_db_defects_mgt[0].crawler_name : null
+  triggered_by_crawler = local.is_live_environment ? aws_glue_crawler.parking_google_sheet_ingestion_raw_zone[0].name : null
   job_description = "To collect and format the Ops Defect Data."
   trigger_enabled = local.is_production_environment
   glue_job_timeout = 10
@@ -1132,7 +1132,7 @@ module "parking_defect_met_fail_monthly_format" {
   pydeequ_zip_key = data.aws_s3_object.pydeequ.key
   spark_ui_output_storage_id = module.spark_ui_output_storage_data_source.bucket_id
   script_name = "parking_defect_met_fail_monthly_format"
-  triggered_by_crawler = local.is_live_environment ? module.parking_spreadsheet_parking_ops_db_defects_mgt[0].crawler_name : null
+  triggered_by_crawler = local.is_live_environment ? aws_glue_crawler.parking_google_sheet_ingestion_raw_zone[0].name : null
   job_description = "To collect and format the Ops Defect Data into a Pivot."
   trigger_enabled = local.is_production_environment
   glue_job_timeout = 10
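
For context on what the changed argument does: triggered_by_crawler passes a crawler name into the Glue job module, which is typically turned into a conditional AWS Glue trigger that starts the job once that crawler succeeds. The sketch below illustrates that pattern; it is an assumption about the module's internals, not code from this repository, and the resource and variable names are illustrative.

# Illustrative sketch (assumed, not taken from this repository): inside a Glue job
# module, a crawler name supplied via var.triggered_by_crawler can be wired into a
# conditional trigger that starts the job after the crawler finishes successfully.
resource "aws_glue_trigger" "conditional" {
  count   = var.triggered_by_crawler != null ? 1 : 0
  name    = "${var.script_name}-trigger"      # e.g. "parking_defect_met_fail-trigger"
  type    = "CONDITIONAL"
  enabled = var.trigger_enabled                # only fires automatically in production

  predicate {
    conditions {
      crawler_name = var.triggered_by_crawler  # the raw-zone ingestion crawler after this commit
      crawl_state  = "SUCCEEDED"
    }
  }

  actions {
    job_name = aws_glue_job.this.name          # the Glue job created by the module (name assumed)
  }
}

With that wiring, both parking_defect_met_fail and parking_defect_met_fail_monthly_format now wait on the parking_google_sheet_ingestion_raw_zone crawler instead of the crawler created by the parking_spreadsheet_parking_ops_db_defects_mgt module.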
