@@ -124,37 +124,12 @@ module "parking_pcn_report_summary" {
124124 }
125125}
126126
127- module "parking_pcn_ltn_report_summary" {
128- source = " ../modules/aws-glue-job"
129- is_live_environment = local. is_live_environment
130- is_production_environment = local. is_production_environment
131- department = module. department_parking_data_source
132- job_name = " ${ local . short_identifier_prefix } parking_pcn_ltn_report_summary"
133- helper_module_key = data. aws_s3_object . helpers . key
134- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
135- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
136- script_name = " parking_pcn_ltn_report_summary"
137- triggered_by_job = module. parking_pcn_denormalisation . job_name
138- job_description = " This job creates the LTN PCN count and Total paid"
139- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
140- number_of_workers_for_glue_job = 2
141- glue_job_worker_type = " G.1X"
142- glue_version = " 4.0"
143- job_parameters = {
144- " --job-bookmark-option" = " job-bookmark-enable"
145- " --environment" = var.environment
146- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
147- }
148- }
149127+ # Migrated job "parking_pcn_ltn_report_summary" to dap-airflow on 30/05/2025
149128
150129# migrated Parking_Suspension_DeNormalised_Data to airflow on 19/05/2025
151130
152-
153-
154-
155131# migrated Parking_Permit_DeNormalised_Data to airflow on 20/05/2025
156132
157-
158133# The airflow has the latest version of these 7 tables
159134# removed Parking_Deployment_Target_Details
160135# removed parking_ceo_average_on_street
0 commit comments