Commit 1f7c70a

reinstate pushdown predicates
1 parent 8f85488 · commit 1f7c70a

7 files changed: +11 −16 lines

scripts/jobs/parking/parking_cedar_fulling_total_summary.py

Lines changed: 2 additions & 2 deletions

@@ -9,6 +9,7 @@

 from scripts.helpers.helpers import create_pushdown_predicate, get_glue_env_var

+
 environment = get_glue_env_var("environment")


@@ -31,8 +32,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_cedar_payments",
     transformation_ctx="AmazonS3_node1625732038443",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node Amazon S3
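
The helper being re-enabled above, create_pushdown_predicate, is defined in scripts/helpers/helpers.py and its body is not part of this diff. As a rough, hypothetical sketch of what a helper with this signature typically returns for an AWS Glue push-down predicate, assuming the refined-zone tables are partitioned by an import_date column stored as a yyyyMMdd string (an assumption, not something this commit confirms):

    from datetime import datetime, timedelta, timezone


    def create_pushdown_predicate_sketch(partition_key: str, days: int) -> str:
        """Hypothetical stand-in for scripts.helpers.helpers.create_pushdown_predicate.

        Builds a Glue push-down predicate string that keeps only partitions whose
        partition_key value falls within the last `days` days, assuming partition
        values are yyyyMMdd strings.
        """
        cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y%m%d")
        # Glue evaluates this expression against partition columns before listing
        # S3 objects, so older partitions are never read at all.
        return f"{partition_key} >= '{cutoff}'"


    # Example: create_pushdown_predicate_sketch("import_date", 7)
    # -> something like "import_date >= '20250101'", depending on today's date.

If the real helper behaves along these lines, reinstating push_down_predicate means each job reads only the most recent week of import_date partitions rather than scanning the whole table.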

scripts/jobs/parking/parking_ceo_average_on_street_hrs_mins_secs.py

Lines changed: 3 additions & 4 deletions

@@ -9,6 +9,7 @@

 from scripts.helpers.helpers import create_pushdown_predicate, get_glue_env_var

+
 environment = get_glue_env_var("environment")


@@ -31,17 +32,15 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_ceo_on_street",
     transformation_ctx="AmazonS3_node1628173244776",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node Amazon S3
 AmazonS3_node1638273151502 = glueContext.create_dynamic_frame.from_catalog(
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_ceo_summary",
     transformation_ctx="AmazonS3_node1638273151502",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node ApplyMapping

scripts/jobs/parking/parking_deployment_target_details.py

Lines changed: 2 additions & 2 deletions

@@ -9,6 +9,7 @@

 from scripts.helpers.helpers import create_pushdown_predicate, get_glue_env_var

+
 environment = get_glue_env_var("environment")


@@ -45,8 +46,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_ceo_on_street",
     transformation_ctx="AmazonS3_node1633593851886",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node Amazon S3

scripts/jobs/parking/parking_permit_street_stress.py

Lines changed: 1 addition & 2 deletions

@@ -53,8 +53,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_permit_denormalised_data",
     transformation_ctx="AmazonS3_node1681807784480",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node SQL

scripts/jobs/parking/parking_permit_street_stress_with_cpz.py

Lines changed: 1 addition & 2 deletions

@@ -53,8 +53,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_permit_denormalised_data",
     transformation_ctx="AmazonS3_node1681807784480",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node SQL

scripts/jobs/parking/parking_suspensions_processed.py

Lines changed: 1 addition & 2 deletions

@@ -45,8 +45,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_suspension_denormalised_data",
     transformation_ctx="AmazonS3_node1661350417347",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node SQL

scripts/jobs/parking/parking_suspensions_processed_with_finyear.py

Lines changed: 1 addition & 2 deletions

@@ -45,8 +45,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
     database="dataplatform-" + environment + "-liberator-refined-zone",
     table_name="parking_suspension_denormalised_data",
     transformation_ctx="AmazonS3_node1661350417347",
-    # teporarily removed while table partitions are fixed
-    # push_down_predicate=create_pushdown_predicate("import_date", 7),
+    push_down_predicate=create_pushdown_predicate("import_date", 7),
 )

 # Script generated for node Amazon S3
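
Taken together, every affected job now passes the predicate straight to its catalog read. For context, a minimal sketch of what one of these generated source nodes looks like after this commit, using the table and node names from parking_cedar_fulling_total_summary.py; the variable name and the GlueContext setup are assumed boilerplate, following the pattern visible in parking_ceo_average_on_street_hrs_mins_secs.py:

    from awsglue.context import GlueContext
    from pyspark.context import SparkContext

    from scripts.helpers.helpers import create_pushdown_predicate, get_glue_env_var

    sc = SparkContext.getOrCreate()
    glueContext = GlueContext(sc)
    environment = get_glue_env_var("environment")

    # Source node after the change: only partitions matching the predicate
    # (roughly the last 7 days of import_date) are read from the refined zone.
    AmazonS3_node1625732038443 = glueContext.create_dynamic_frame.from_catalog(
        database="dataplatform-" + environment + "-liberator-refined-zone",
        table_name="parking_cedar_payments",
        transformation_ctx="AmazonS3_node1625732038443",
        push_down_predicate=create_pushdown_predicate("import_date", 7),
    )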
