@@ -392,30 +392,7 @@ module "parking_foreign_vrm_pcns" {
392392 }
393393}
394394
395-
396- module "parking_correspondence_performance_records_with_pcn" {
397- source = " ../modules/aws-glue-job"
398- is_live_environment = local. is_live_environment
399- is_production_environment = local. is_production_environment
400- department = module. department_parking_data_source
401- job_name = " ${ local . short_identifier_prefix } parking_correspondence_performance_records_with_pcn"
402- helper_module_key = data. aws_s3_object . helpers . key
403- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
404- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
405- script_name = " parking_correspondence_performance_records_with_pcn"
406- triggered_by_job = module. parking_pcn_denormalisation . job_name
407- job_description = " correspondence performance records with pcn"
408- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
409- trigger_enabled = local. is_production_environment
410- number_of_workers_for_glue_job = 10
411- glue_job_worker_type = " G.1X"
412- glue_version = " 4.0"
413- job_parameters = {
414- " --job-bookmark-option" = " job-bookmark-disable"
415- " --environment" = var.environment
416- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
417- }
418- }
 395+ # Migrated job "parking_correspondence_performance_records_with_pcn" to dap-airflow on 14/04/2025
419396
420397module "parking_disputes_kpi_gds_summary" {
421398 source = " ../modules/aws-glue-job"
@@ -506,30 +483,8 @@ module "Parking_Permit_Diesel_Trends_Bought_in_Month" {
506483 " --environment" = var.environment
507484 }
508485}
509- module "parking_correspondence_performance_records_with_pcn_gds" {
510- source = " ../modules/aws-glue-job"
511- is_live_environment = local. is_live_environment
512- is_production_environment = local. is_production_environment
513- department = module. department_parking_data_source
514- job_name = " ${ local . short_identifier_prefix } parking_correspondence_performance_records_with_pcn_gds"
515- helper_module_key = data. aws_s3_object . helpers . key
516- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
517- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
518- script_name = " parking_correspondence_performance_records_with_pcn_gds"
519- triggered_by_job = module. parking_pcn_denormalisation . job_name
520- job_description = " parking_correspondence_performance_records_with_pcn with no timestamp for GDS"
521- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
522- trigger_enabled = local. is_production_environment
523- number_of_workers_for_glue_job = 10
524- glue_job_worker_type = " G.1X"
525- glue_version = " 4.0"
526- job_parameters = {
527- " --job-bookmark-option" = " job-bookmark-disable"
528- " --environment" = var.environment
529- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
530- }
531- }
532486
 487+ # Migrated job "parking_correspondence_performance_records_with_pcn_gds" to dap-airflow on 14/04/2025
533488
534489module "parking_foi_pcn_gds_daily_summary_records" {
535490 source = " ../modules/aws-glue-job"
@@ -634,53 +589,9 @@ module "parking_permit_street_stress_with_cpz" {
634589 " --environment" = var.environment
635590 }
636591}
637- module "parking_correspondence_performance_records_with_pcn_downtime" {
638- source = " ../modules/aws-glue-job"
639- is_live_environment = local. is_live_environment
640- is_production_environment = local. is_production_environment
641- department = module. department_parking_data_source
642- job_name = " ${ local . short_identifier_prefix } parking_correspondence_performance_records_with_pcn_downtime"
643- helper_module_key = data. aws_s3_object . helpers . key
644- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
645- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
646- script_name = " parking_correspondence_performance_records_with_pcn_downtime"
647- triggered_by_job = module. parking_pcn_denormalisation . job_name
648- job_description = " correspondence performance records with pcn FOI records Team details and Downtime data"
649- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
650- trigger_enabled = local. is_production_environment
651- number_of_workers_for_glue_job = 10
652- glue_job_worker_type = " G.1X"
653- glue_version = " 4.0"
654- job_parameters = {
655- " --job-bookmark-option" = " job-bookmark-disable"
656- " --environment" = var.environment
657- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
658- }
659- }
660592
661- module "parking_correspondence_performance_records_with_pcn_downtime_gds" {
662- source = " ../modules/aws-glue-job"
663- is_live_environment = local. is_live_environment
664- is_production_environment = local. is_production_environment
665- department = module. department_parking_data_source
666- job_name = " ${ local . short_identifier_prefix } parking_correspondence_performance_records_with_pcn_downtime_gds"
667- helper_module_key = data. aws_s3_object . helpers . key
668- pydeequ_zip_key = data. aws_s3_object . pydeequ . key
669- spark_ui_output_storage_id = module. spark_ui_output_storage_data_source . bucket_id
670- script_name = " parking_correspondence_performance_records_with_pcn_downtime_gds"
671- triggered_by_job = module. parking_pcn_denormalisation . job_name
672- job_description = " correspondence performance records with pcn FOI records Team details and Downtime data for Google Studio - gds"
673- workflow_name = " ${ local . short_identifier_prefix } parking-liberator-data-workflow"
674- trigger_enabled = local. is_production_environment
675- number_of_workers_for_glue_job = 10
676- glue_job_worker_type = " G.1X"
677- glue_version = " 4.0"
678- job_parameters = {
679- " --job-bookmark-option" = " job-bookmark-disable"
680- " --environment" = var.environment
681- " --conf" = " spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
682- }
683- }
 593+ # Migrated job "parking_correspondence_performance_records_with_pcn_downtime" to dap-airflow on 14/04/2025
 594+ # Migrated job "parking_correspondence_performance_records_with_pcn_downtime_gds" to dap-airflow on 14/04/2025
684595
685596module "parking_open_pcns_vrms_linked_cancelled_ringer" {
686597 source = " ../modules/aws-glue-job"
0 commit comments