@@ -201,30 +201,7 @@ module "parking_pcn_ltn_report_summary" {
201201# migrated job "Parking_Suspensions_Processed" to dap-airflow on 19/05/2025
202202# parking_suspensions_processed_with_finyear migrated to dap-airflow on 19/05/2025
203203
204-
205- module "parking_pcn_dvla_response_no_address" {
206- source = " ../modules/aws-glue-job"
207- is_live_environment = local. is_live_environment
208- is_production_environment = local. is_production_environment
209- department = module. department_parking_data_source
210- job_name = " ${ local . short_identifier_prefix } parking_pcn_dvla_response_no_address"
211- helper_module_key = data. aws_s3_object . helpers . key
212- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
213- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
214- script_name = " parking_pcn_dvla_response_no_address"
215- triggered_by_job = module. parking_pcn_denormalisation . job_name
216- job_description = " All VRMs with PCNs response from DVLA has no address still open and not due to be written off"
217- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
218- trigger_enabled = local. is_production_environment
219- number_of_workers_for_glue_job = 2
220- glue_job_worker_type = " G.1X"
221- glue_version = " 4.0"
222- job_parameters = {
223- " --job-bookmark-option" = " job-bookmark-disable"
224- " --environment" = var.environment
225- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
226- }
227- }
204+ # Migrated "parking_pcn_dvla_response_no_address" to dap-airflow on 30/05/2025
228205
229206# Migrated job "parking_motorcycle_permits_480" to dap-airflow on 20/02/2025
230207# Migrated job "parking_permit_street_cpz_stress_mc" to dap-airflow on 21/05/2025
0 commit comments