@@ -491,30 +491,8 @@ module "parking_pcn_dvla_response_no_address" {
491491}
492492
493493# Migrated job "parking_motorcycle_permits_480" to dap-airflow on 20/02/2025
494+ # Migrated job "parking_permit_street_cpz_stress_mc" to dap-airflow on 21/05/2025
494495
495- module "parking_permit_street_cpz_stress_mc" {
496-   source                         = "../modules/aws-glue-job"
497-   is_live_environment            = local.is_live_environment
498-   is_production_environment      = local.is_production_environment
499-   department                     = module.department_parking_data_source
500-   job_name                       = "${local.short_identifier_prefix}parking_permit_street_cpz_stress_mc"
501-   helper_module_key              = data.aws_s3_object.helpers.key
502-   pydeequ_zip_key                = data.aws_s3_object.pydeequ.key
503-   spark_ui_output_storage_id     = module.spark_ui_output_storage_data_source.bucket_id
504-   script_name                    = "parking_permit_street_cpz_stress_mc"
505-   triggered_by_job               = module.parking_permit_de_normalisation.job_name
506-   job_description                = "A new way for Mike to get the parking_permit_street_cpz_stress_mc data"
507-   workflow_name                  = "${local.short_identifier_prefix}parking-liberator-data-workflow"
508-   trigger_enabled                = local.is_production_environment
509-   number_of_workers_for_glue_job = 2 # 2 minimum which is enough for this job
510-   glue_job_worker_type           = "G.1X"
511-   glue_version                   = "4.0"
512-   job_parameters = {
513-     "--job-bookmark-option" = "job-bookmark-disable"
514-     "--environment"         = var.environment
515-     "--conf"                = "spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
516-   }
517- }
518496
519497# MRB 18-08-2024 job created
520498# Migrated job "parking_permit_denormalisation_mc" to dap-airflow on 01/05/2025
0 commit comments