Commit ffa9138

Merge branch 'main' into tascomi-static-reload

2 parents: 5f760e8 + 7c03099
20 files changed: +118, -67 lines

external-lib/Makefile

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 all: target/java-lib-1.0-SNAPSHOT-jar-with-dependencies.jar target/deequ-1.0.3.jar target/pydeequ-1.0.1.zip
 
 target/java-lib-1.0-SNAPSHOT-jar-with-dependencies.jar: pom.xml
-	mvn assembly:single -DdescriptorId=jar-with-dependencies
+	mvn clean package
 
 target/deequ-1.0.3.jar:
 	wget https://repo1.maven.org/maven2/com/amazon/deequ/deequ/1.0.3/deequ-1.0.3.jar -O target/deequ-1.0.3.jar

external-lib/pom.xml

Lines changed: 33 additions & 30 deletions
@@ -5,13 +5,14 @@
   <artifactId>java-lib</artifactId>
   <version>1.0-SNAPSHOT</version>
   <name>java-lib</name>
-  <!-- FIXME change it to the project's website -->
   <url>http://www.example.com</url>
+
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <maven.compiler.source>1.7</maven.compiler.source>
     <maven.compiler.target>1.7</maven.compiler.target>
   </properties>
+
   <dependencies>
     <dependency>
       <groupId>junit</groupId>
@@ -25,41 +26,44 @@
       <version>0.13.7</version>
     </dependency>
   </dependencies>
+
   <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>3.3.0</version>
+        <configuration>
+          <descriptorRefs>
+            <descriptorRef>jar-with-dependencies</descriptorRef>
+          </descriptorRefs>
+          <archive>
+            <manifest>
+              <addClasspath>true</addClasspath>
+            </manifest>
+          </archive>
+          <appendAssemblyId>true</appendAssemblyId>
+          <ignoreDirFormatExtensions>false</ignoreDirFormatExtensions>
+          <includeProjectArtifact>false</includeProjectArtifact>
+        </configuration>
+        <executions>
+          <execution>
+            <id>make-assembly</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+
     <pluginManagement>
-      <!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
       <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-shade-plugin</artifactId>
-          <version>3.2.4</version>
-          <executions>
-            <execution>
-              <phase>package</phase>
-              <goals>
-                <goal>shade</goal>
-              </goals>
-              <configuration>
-                <artifactSet>
-                  <excludes>
-                    <exclude>classworlds:classworlds</exclude>
-                    <exclude>junit:junit</exclude>
-                    <exclude>jmock:*</exclude>
-                    <exclude>*:xml-apis</exclude>
-                    <exclude>org.apache.maven:lib:tests</exclude>
-                    <exclude>log4j:log4j:jar:</exclude>
-                  </excludes>
-                </artifactSet>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-        <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
         <plugin>
           <artifactId>maven-clean-plugin</artifactId>
           <version>3.1.0</version>
         </plugin>
-        <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
         <plugin>
           <artifactId>maven-resources-plugin</artifactId>
           <version>3.0.2</version>
@@ -84,7 +88,6 @@
           <artifactId>maven-deploy-plugin</artifactId>
           <version>2.8.2</version>
         </plugin>
-        <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
         <plugin>
           <artifactId>maven-site-plugin</artifactId>
           <version>3.7.1</version>

lambdas/g_drive_folder_to_s3/Dockerfile

Lines changed: 1 addition & 5 deletions
@@ -18,7 +18,7 @@ COPY Pipfile Pipfile.lock /app/
 COPY main.py ./source/
 
 # Install Python dependencies using pipenv
-RUN pipenv install
+RUN pipenv install
 RUN pipenv requirements > requirements.txt
 RUN pip install -t ./source/lib -r requirements.txt
 
@@ -27,7 +27,3 @@ WORKDIR /app/source
 RUN zip -r g_drive_folder_to_s3.zip .
 
 CMD "pyhon3", "main.py"
-
-
-
-

scripts/jobs/parking/Parking_interim_cycle_hangar_waiting_list.py

Lines changed: 7 additions & 6 deletions
@@ -23,6 +23,7 @@
 23/12/2024 - Create SQL
 06/01/2025 - add unsubscribed email
 13/01/2025 - add opt-in data
+21/01/2024 - change to use new telephone & email field in Michael's data
 *********************************************************************************/
 With Interim_Wait as (
 SELECT
@@ -39,6 +40,7 @@
 ,y
 ,lat
 ,long
+,telephone
 FROM "parking-raw-zone".interim_cycle_wait_list
 WHERE import_date = (select max(import_date)
 from "parking-raw-zone".interim_cycle_wait_list)),
@@ -84,14 +86,13 @@
 from "parking-raw-zone".parking_parking_opt_in_form_responses)
 AND please_select_one_of_the_options_below like 'No.%')
 
+/*** 21/01/2025 - Update the selected list to use Michael's data (Telephone & Email)***/
 SELECT
-A.*, cast(D.telephone_number as varchar) as Telephone_Number, C.address2 as Street, B.housing_estate,
-CASE
-When length(E.email_address) > 1 Then E.email_address
-When length(F.email)> 1 Then F.email
-END as email_address,
+A.forename, A.surname, A.email, A.party_id_to, A.party_id, A.uprn, A.address1, A.address2,
+A.post_code, A.x, A.y, A.lat, A.long, cast(telephone as varchar) as telephone_number,
+C.address2 as Street, B.housing_estate, A.email as email_address,
 
-format_datetime(CAST(CURRENT_TIMESTAMP AS timestamp),
+format_datetime(CAST(CURRENT_TIMESTAMP AS timestamp),
 'yyyy-MM-dd HH:mm:ss') AS import_date_timestamp,
 
 format_datetime(current_date, 'yyyy') AS import_year,
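
Both parking scripts embed SQL text like the block above in a Python Glue job and run it through a sparkSqlQuery helper, whose signature is visible in the hunk context of the next file but whose body is not part of this diff. The following is only a minimal sketch of the usual Glue-generated pattern, for orientation; in these jobs the mapping may well be empty, since the queries read catalog tables directly.

    from awsglue.context import GlueContext
    from awsglue.dynamicframe import DynamicFrame

    def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFrame:
        # Expose each input DynamicFrame to Spark SQL under the alias the query expects
        for alias, frame in mapping.items():
            frame.toDF().createOrReplaceTempView(alias)
        # Run the SQL text and wrap the result back into a DynamicFrame for the next node
        result = glueContext.spark_session.sql(query)
        return DynamicFrame.fromDF(result, glueContext, transformation_ctx)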

scripts/jobs/parking/parking_defect_met_fail.py

Lines changed: 5 additions & 4 deletions
@@ -43,14 +43,15 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFra
 
 # Script generated for node SQL
 SqlQuery33 = """
-/*********************************************************************************
+/*********************************************************************************************
 Parking_Defect_MET_FAIL
 
 Temp SQL that formats the defcet managment records for Fail/Met
 
 16/11/2022 - Create Query
 15/01/2025 - found AND WHERE repair_date >= (??) removed AND
-*********************************************************************************/
+20/01/2025 - length(ltrim(rtrim(reported_date))) > 0 to length(ltrim(rtrim(reported_date))) > 5
+**********************************************************************************************/
 With Defect as (
 SELECT
 reference_no,
@@ -69,7 +70,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFra
 substr(repair_date, 7, 2)||'-'||
 substr(repair_date, 9, 2)
 
-When reported_date like '%/%'Then substr(repair_date, 7, 4)||'-'||
+When repair_date like '%/%'Then substr(repair_date, 7, 4)||'-'||
 substr(repair_date, 4, 2)||'-'||
 substr(repair_date, 1, 2)
 ELSE substr(cast(repair_date as string),1, 10)
@@ -82,7 +83,7 @@ def sparkSqlQuery(glueContext, query, mapping, transformation_ctx) -> DynamicFra
 
 FROM parking_parking_ops_db_defects_mgt
 WHERE import_date = (Select MAX(import_date) from parking_parking_ops_db_defects_mgt)
-AND length(ltrim(rtrim(reported_date))) > 0
+AND length(ltrim(rtrim(reported_date))) > 5 AND length(ltrim(rtrim(repair_date))) > 5
 AND met_not_met not IN ('#VALUE!','#N/A') /*('N/A','#N/A','#VALUE!')*/)
 
 SELECT
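
The second hunk fixes the '%/%' branch, which tested reported_date while slicing repair_date, and the third tightens the emptiness check from > 0 to > 5 on both date columns. A rough, hypothetical Python equivalent of the visible part of that CASE, purely to illustrate the slicing (the first WHEN branch lies outside the hunk and is not reproduced):

    def normalise_repair_date(raw: str):
        # Mirrors the new guard: length(ltrim(rtrim(repair_date))) > 5
        value = raw.strip()
        if len(value) <= 5:
            return None
        # SQL substr is 1-based: substr(x, 7, 4) == value[6:10], etc.
        if "/" in value:  # e.g. '21/01/2025' -> '2025-01-21'
            return f"{value[6:10]}-{value[3:5]}-{value[0:2]}"
        # ELSE branch: keep the first 10 characters of the cast value
        return value[:10]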

terraform/config/prod.tfvars

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ stack = "Core"
 transit_gateway_availability_zones = ["eu-west-2a", "eu-west-2b", "eu-west-2c"]
 transit_gateway_cidr = "10.120.32.0/24"
 transit_gateway_private_subnets = ["10.120.32.0/26", "10.120.32.64/26", "10.120.32.128/26"]
-rds_instance_ids = ["addresses-api-db-production-emergency-temp"]
+rds_instance_ids = []
 qlik_server_instance_type = "m5.2xlarge"
 qlik_ssl_certificate_domain = "qliksense.hackney.gov.uk"
 redshift_public_ips = ["18.168.126.173"]

terraform/core/13-mssql-ingestion.tf

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ module "max_concurrency_lambda" {
   s3_key = "academy-revs-and-bens-housing-needs-database-ingestion-max-concurrency.zip"
   lambda_source_dir = "../../lambdas/calculate_max_concurrency"
   lambda_output_path = "../../lambdas/calculate_max_concurrency/max-concurrency.zip"
-  runtime = "python3.8"
+  runtime = "python3.9"
 }
 
 resource "aws_iam_role" "academy_step_functions_role" {

terraform/core/15-unrestricted-geolive-database-ingestion.tf

Lines changed: 50 additions & 0 deletions
@@ -247,3 +247,53 @@ module "housing_boundaries_geolive_ingestion_job" {
     table_prefix = null
   }
 }
+
+module "llpg_geolive_database_ingestion" {
+  count = local.is_live_environment ? 1 : 0
+  tags  = module.tags.values
+
+  source = "../modules/database-ingestion-via-jdbc-connection"
+
+  name                        = "geolive-llpg-schema"
+  jdbc_connection_url         = "jdbc:postgresql://geolive-db-prod.cjgyygrtgrhl.eu-west-2.rds.amazonaws.com:5432/geolive"
+  jdbc_connection_description = "JDBC connection to Geolive PostgreSQL database, to access the llpg schema only"
+  jdbc_connection_subnet      = data.aws_subnet.network[local.instance_subnet_id]
+  identifier_prefix           = local.short_identifier_prefix
+  database_secret_name        = "database-credentials/geolive-llpg"
+  schema_name                 = "llpg"
+  job_schedule                = "cron(1 1 ? * * *)"
+}
+
+module "llpg_geolive_ingestion_job" {
+  count                     = local.is_live_environment ? 1 : 0
+  source                    = "../modules/aws-glue-job"
+  is_live_environment       = local.is_live_environment
+  is_production_environment = local.is_production_environment
+
+  department                 = module.department_unrestricted
+  job_name                   = "${local.short_identifier_prefix}geolive llpg tables ingestion"
+  glue_version               = "4.0"
+  script_s3_object_key       = aws_s3_object.ingest_database_tables_via_jdbc_connection.key
+  spark_ui_output_storage_id = module.spark_ui_output_storage.bucket_id
+  helper_module_key          = aws_s3_object.helpers.key
+  pydeequ_zip_key            = aws_s3_object.pydeequ.key
+  jdbc_connections           = [module.llpg_geolive_database_ingestion[0].jdbc_connection_name]
+  triggered_by_crawler       = module.llpg_geolive_database_ingestion[0].crawler_name
+  workflow_name              = module.llpg_geolive_database_ingestion[0].workflow_name
+  job_parameters = {
+    "--s3_ingestion_bucket_target"  = "s3://${module.raw_zone.bucket_id}/unrestricted/geolive/llpg/"
+    "--s3_ingestion_details_target" = "s3://${module.raw_zone.bucket_id}/unrestricted/geolive/llpg/ingestion-details/"
+    "--source_data_database"        = module.llpg_geolive_database_ingestion[0].ingestion_database_name
+  }
+  crawler_details = {
+    database_name      = module.department_unrestricted.raw_zone_catalog_database_name
+    s3_target_location = "s3://${module.raw_zone.bucket_id}/unrestricted/geolive/llpg/"
+    configuration = jsonencode({
+      Version = 1.0
+      Grouping = {
+        TableLevelConfiguration = 5
+      }
+    })
+    table_prefix = null
+  }
+}
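
The two new modules wire a JDBC connection, crawler and workflow to a Glue job that lands the llpg schema in the raw zone. After an apply, the wiring could be spot-checked with boto3; the resource names below are placeholders (the real ones come from the module outputs and local.short_identifier_prefix), so treat this purely as a sketch.

    import boto3

    glue = boto3.client("glue", region_name="eu-west-2")

    # Placeholder names -- substitute the values exported by the Terraform modules
    CRAWLER_NAME = "<prefix>geolive-llpg-schema"
    WORKFLOW_NAME = "<prefix>geolive-llpg-schema-workflow"

    # Confirm the crawler exists and report its state
    print(glue.get_crawler(Name=CRAWLER_NAME)["Crawler"]["State"])

    # List the most recent workflow runs and their status
    for run in glue.get_workflow_runs(Name=WORKFLOW_NAME, MaxResults=5).get("Runs", []):
        print(run["WorkflowRunId"], run["Status"])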

terraform/core/22-sagemaker.tf

Lines changed: 2 additions & 2 deletions
@@ -110,7 +110,7 @@ resource "aws_iam_role_policy_attachment" "shutdown_notebooks" {
   policy_arn = aws_iam_policy.shutdown_notebooks[0].arn
 }
 
-# Lambda function
+# Lambda function
 data "archive_file" "shutdown_notebooks" {
   type = "zip"
   source_dir = "../../lambdas/shutdown_notebooks"
@@ -138,7 +138,7 @@ resource "aws_lambda_function" "shutdown_notebooks" {
 
   role = aws_iam_role.shutdown_notebooks[0].arn
   handler = "main.shutdown_notebooks"
-  runtime = "python3.8"
+  runtime = "python3.9"
   function_name = "${local.short_identifier_prefix}shutdown-notebooks"
   s3_bucket = module.lambda_artefact_storage.bucket_id
   s3_key = aws_s3_object.shutdown_notebooks.key

terraform/core/38-api-ingestion.tf

Lines changed: 2 additions & 2 deletions
@@ -16,7 +16,7 @@ module "icaseworks_api_ingestion" {
   lambda_artefact_storage_bucket = module.lambda_artefact_storage.bucket_id
   lambda_name = "icaseworks-api-ingestion"
   lambda_handler = "main.lambda_handler"
-  runtime_language = "python3.8"
+  runtime_language = "python3.9"
   secrets_manager_kms_key = aws_kms_key.secrets_manager_key
   s3_target_bucket_arn = module.landing_zone.bucket_arn
   s3_target_bucket_name = local.s3_target_bucket_name
@@ -44,7 +44,7 @@ module "vonage_api_ingestion" {
   lambda_artefact_storage_bucket = module.lambda_artefact_storage.bucket_id
   lambda_name = "vonage-api-ingestion"
   lambda_handler = "main.lambda_handler"
-  runtime_language = "python3.8"
+  runtime_language = "python3.9"
   secrets_manager_kms_key = aws_kms_key.secrets_manager_key
   s3_target_bucket_arn = module.landing_zone.bucket_arn
   s3_target_bucket_name = local.s3_target_bucket_name
