
Commit 3797500

Merge pull request #2097 from LBHackney-IT/revert-tascomi-aws-migration

Revert tascomi aws migration

2 parents: 4e227a8 + 3cb9f1a

File tree: 5 files changed (+8, -7 lines)


notebook/scripts/planning/load-table-from-tascomi-API-endpoint.ipynb (1 addition, 1 deletion)

@@ -58,7 +58,7 @@
     "private_key = PRIVATE_KEY.encode('utf-8')\n",
     "\n",
     "table_to_read = \"\"\n",
-    "request_uri = f'https://hackney-planning.idoxcloud.com/rest/v1/{table_to_read}'\n",
+    "request_uri = f'https://hackney-planning.tascomi.com/rest/v1/{table_to_read}'\n",
     "request_method = 'GET'"
    ]
   },
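For context, a minimal sketch of what this notebook cell does once the URI points back at tascomi.com: authenticate with the key pair, then issue a plain GET against the chosen resource. The import path for `authenticate_tascomi` is assumed (the helper appears in the ingestion job below), and the key values and `contacts` resource are placeholders, not values from this commit.

# Sketch only: key values are placeholders and the import path is assumed.
import requests

from scripts.jobs.planning.tascomi_api_ingestion import authenticate_tascomi  # assumed path

PUBLIC_KEY = "<public-key>"    # placeholder; real keys come from configuration
PRIVATE_KEY = "<private-key>"
private_key = PRIVATE_KEY.encode('utf-8')

table_to_read = "contacts"     # example resource, as used in the tests below
request_uri = f'https://hackney-planning.tascomi.com/rest/v1/{table_to_read}'

# authenticate_tascomi is assumed to return headers carrying the API
# credentials, matching its usage in tascomi_api_ingestion.py.
headers = authenticate_tascomi({}, PUBLIC_KEY, private_key)
response = requests.get(request_uri, data="", headers=headers)
response.raise_for_status()
print(response.json())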

scripts/jobs/planning/tascomi_api_ingestion.py (3 additions, 3 deletions)

@@ -87,7 +87,7 @@ def get_number_of_pages(resource, query=""):
 
     headers = authenticate_tascomi(headers, public_key, private_key)
 
-    url = f'https://hackney-planning.idoxcloud.com/rest/v1/{resource}{query}'
+    url = f'https://hackney-planning.tascomi.com/rest/v1/{resource}{query}'
     res = requests.get(url, data="", headers=headers)
     if res.status_code == 202:
         logger.info(f"received status code 202, whilst getting number of pages for {resource}, with query {query}")
@@ -144,7 +144,7 @@ def get_requests_since_last_import(resource, last_import_date):
         number_of_pages = number_of_pages_reponse["number_of_pages"]
         logger.info(f"Number of pages to retrieve for {day}: {number_of_pages}")
         requests_list += [RequestRow(page_number,
-                                     f'https://hackney-planning.idoxcloud.com/rest/v1/{resource}?page={page_number}&last_updated={day}',
+                                     f'https://hackney-planning.tascomi.com/rest/v1/{resource}?page={page_number}&last_updated={day}',
                                      "") for page_number in range(1, number_of_pages + 1)]
     number_of_requests = len(requests_list)
     if number_of_requests == 0:
@@ -162,7 +162,7 @@ def get_requests_for_full_load(resource):
     number_of_pages = number_of_pages_reponse["number_of_pages"]
     logger.info(f"Number of pages to retrieve: {number_of_pages}")
     requests_list = [
-        RequestRow(page_number, f'https://hackney-planning.idoxcloud.com/rest/v1/{resource}?page={page_number}', "") for
+        RequestRow(page_number, f'https://hackney-planning.tascomi.com/rest/v1/{resource}?page={page_number}', "") for
         page_number in range(1, number_of_pages + 1)]
     number_of_requests = len(requests_list)
     requests_list = sc.parallelize(requests_list)
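All three hunks touch the same pattern: each helper fans a resource out into one RequestRow per page, with the page URL baked in at build time, which is why the host swap appears once per URL template. A standalone sketch of that fan-out, assuming RequestRow is a simple record type (its real definition is not shown in this diff) and using a made-up page count:

# Sketch of the per-page request fan-out in get_requests_for_full_load.
# RequestRow's real definition is not in this diff; a namedtuple stands in.
from collections import namedtuple

RequestRow = namedtuple("RequestRow", ["page_number", "url", "exception"])

def build_full_load_requests(resource, number_of_pages):
    # One request per page, all against the restored tascomi.com host.
    return [
        RequestRow(page_number,
                   f'https://hackney-planning.tascomi.com/rest/v1/{resource}?page={page_number}',
                   "")
        for page_number in range(1, number_of_pages + 1)
    ]

requests_list = build_full_load_requests("contacts", 3)
for row in requests_list:
    print(row.url)
# In the job itself this list is handed to sc.parallelize(...) so Spark
# workers can fetch the pages in parallel.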

scripts/tests/planning/test_tascomi_parse_tables.py (2 additions, 2 deletions)

@@ -53,7 +53,7 @@ def test_parsed_row_data(self, spark):
             "creation_user_id": None,
             "title_id": "4",
             "page_number": 691,
-            "import_api_url_requested": "https://hackney-planning.idoxcloud.com/rest/v1/contacts?page=691",
+            "import_api_url_requested": "https://hackney-planning.tascomi.com/rest/v1/contacts?page=691",
             "import_api_status_code": 200,
             "import_exception_thrown": "",
             "import_datetime": datetime(2021, 9, 16, 13, 10),
@@ -69,7 +69,7 @@ def parse_json_into_dataframe(self, spark, column, data):
         data_with_imports = [
             {
                 "page_number": 691,
-                "import_api_url_requested": "https://hackney-planning.idoxcloud.com/rest/v1/contacts?page=691",
+                "import_api_url_requested": "https://hackney-planning.tascomi.com/rest/v1/contacts?page=691",
                 "import_api_status_code": 200,
                 "import_exception_thrown": "",
                 "import_datetime": datetime(2021, 9, 16, 13, 10),

terraform/core/15-unrestricted-geolive-database-ingestion.tf (1 addition, 0 deletions)

@@ -280,6 +280,7 @@ module "llpg_geolive_ingestion_job" {
   jdbc_connections     = [module.llpg_geolive_database_ingestion[0].jdbc_connection_name]
   triggered_by_crawler = module.llpg_geolive_database_ingestion[0].crawler_name
   workflow_name        = module.llpg_geolive_database_ingestion[0].workflow_name
+  max_retries          = 1
   job_parameters = {
     "--s3_ingestion_bucket_target"  = "s3://${module.raw_zone.bucket_id}/unrestricted/geolive/llpg/"
     "--s3_ingestion_details_target" = "s3://${module.raw_zone.bucket_id}/unrestricted/geolive/llpg/ingestion-details/"

terraform/etl/24-aws-glue-tascomi-data.tf (1 addition, 1 deletion)

@@ -142,7 +142,7 @@ resource "aws_glue_trigger" "tascomi_tables_weekly_ingestion_triggers" {
 
   name     = "${local.short_identifier_prefix}Tascomi ${title(replace(each.value, "_", " "))} Ingestion Trigger"
   type     = "SCHEDULED"
-  schedule = "cron(0 21 ? * MON *)"
+  schedule = "cron(0 16 ? * SUN *)"
   enabled  = local.is_production_environment
 
   actions {
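Note on the schedule change: AWS cron expressions read as cron(minutes hours day-of-month month day-of-week year), so this trigger moves from 21:00 UTC every Monday (cron(0 21 ? * MON *)) to 16:00 UTC every Sunday (cron(0 16 ? * SUN *)).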
