Commit dd04ef3

Merge branch 'feature/PI-546-etl_item_already_exists' into release/2024-10-22
2 parents: 2a1b08a + d79437d

File tree

6 files changed: +57 −2 lines changed

infrastructure/swagger/04_apigee.yaml

Lines changed: 1 addition & 1 deletion
@@ -17,4 +17,4 @@ x-nhsd-apim:
   ratelimiting:
     proxy:
       timeunit: "minute"
-      limit: 2000
+      limit: 6000
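
For context, the new proxy limit works out to roughly 100 requests per second (6000 per minute). The snippet below is a minimal, hypothetical client-side retry for callers that still hit the limit and receive HTTP 429; it is not part of this commit, and the URL and function name are placeholders.

import time

import requests  # third-party HTTP client, assumed available

PROXY_URL = "https://example.org/placeholder-endpoint"  # placeholder, not a real endpoint


def get_with_rate_limit_retry(url: str, max_attempts: int = 5) -> requests.Response:
    """Retry on HTTP 429, honouring Retry-After when the proxy supplies it."""
    for attempt in range(1, max_attempts + 1):
        response = requests.get(url, timeout=10)
        if response.status_code != 429:
            return response
        # Back off before retrying; assumes Retry-After, if present, is given in
        # seconds, and falls back to an exponential delay when the header is absent.
        delay = float(response.headers.get("Retry-After", 2**attempt))
        time.sleep(delay)
    return response


# Example: response = get_with_rate_limit_retry(PROXY_URL)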

infrastructure/terraform/per_workspace/modules/etl/sds/main.tf

Lines changed: 1 addition & 1 deletion
@@ -558,7 +558,7 @@ module "schedule_trigger_update" {
   source              = "./schedule/"
   lambda_arn          = module.trigger_update.lambda_function.lambda_function_arn
   lambda_name         = module.trigger_update.lambda_function.lambda_function_name
-  schedule_expression = contains(["prod"], var.environment) ? "rate(15 minutes)" : "cron(0 0 1 1 ? 2000)" # changelog schedule only active for prod
+  schedule_expression = "cron(0 0 1 1 ? 2000)" # Will never run. To turn on set to: contains(["prod"], var.environment) ? "rate(15 minutes)" : "cron(0 0 1 1 ? 2000)"
 }

 module "bulk_trigger_notification" {

(The replacement cron expression targets 00:00 on 1 January 2000, a date in the past, so the EventBridge rule never fires; restoring the conditional shown in the comment re-enables the 15-minute schedule for prod.)

scripts/etl/decode_load.py

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
"""
A script that decodes the unprocessed file from the s3 folder input--load/

You can provide the downloaded file path, and chosen decoded file destination.
"""

import json
import pickle
from collections import deque

import lz4.frame


def lz4_pickle_decode_and_save(file_path: str, output_file: str):
    try:
        # Read the compressed pickle data from the file
        with open(file_path, "rb") as f:
            compressed_data = f.read()

        # Decompress the LZ4 data
        decompressed_data = lz4.frame.decompress(compressed_data)

        # Unpickle the decompressed data
        decoded_data = pickle.loads(decompressed_data)

        # If the data is wrapped in a deque, convert it to a list
        if isinstance(decoded_data, deque):
            decoded_data = list(decoded_data)

        # Save the decoded data to a file
        with open(output_file, "w") as json_file:
            json.dump(decoded_data, json_file, indent=2, default=str)

        print(f"Decoded transaction(s) saved to {output_file}")  # noqa

        return decoded_data

    except lz4.frame.LZ4FrameError as e:
        print(f"LZ4 decompression error: {e}")  # noqa
    except pickle.UnpicklingError as e:
        print(f"Unpickling error: {e}")  # noqa
    except FileNotFoundError:
        print(f"File not found: {file_path}")  # noqa
    except Exception as e:
        print(f"An unexpected error occurred: {e}")  # noqa


# Example usage
if __name__ == "__main__":
    # Provide your file path and output file path here
    file_path = "unprocessed-11"
    output_file = "decoded_transaction.json"

    # Decode the data from the file and save it to JSON
    decoded_deque = lz4_pickle_decode_and_save(file_path, output_file)
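
To sanity-check the new script locally, one can build a small LZ4-compressed pickle in the shape the load step produces (a deque of records) and round-trip it through lz4_pickle_decode_and_save. This is an illustrative sketch only, not part of the commit: the sample record fields are invented, and it assumes scripts/etl is importable as a package (otherwise copy the function into the snippet).

import pickle
from collections import deque

import lz4.frame

from scripts.etl.decode_load import lz4_pickle_decode_and_save  # assumes package-style import works

# Build a sample payload shaped like a queue of records (field names are illustrative)
sample = deque([{"unique_identifier": "abc-123", "object_class": "exampleClass"}])

# Compress it the same way the script expects to decompress it
with open("unprocessed-sample", "wb") as f:
    f.write(lz4.frame.compress(pickle.dumps(sample)))

# Decode it back out; this also writes decoded_sample.json to the working directory
decoded = lz4_pickle_decode_and_save("unprocessed-sample", "decoded_sample.json")
assert decoded == [{"unique_identifier": "abc-123", "object_class": "exampleClass"}]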

src/etl/sds/tests/changelog/scenarios/message_handling_system/can_add_and_delete_device_with_same_unique_identifier/__init__.py renamed to src/etl/sds/tests/changelog/scenarios/message_handling_system/can_delete_and_add_device_with_same_unique_identifier/__init__.py

File renamed without changes.

src/etl/sds/tests/changelog/scenarios/message_handling_system/can_add_and_delete_device_with_same_unique_identifier/extract_output.json renamed to src/etl/sds/tests/changelog/scenarios/message_handling_system/can_delete_and_add_device_with_same_unique_identifier/extract_output.json

File renamed without changes.

src/etl/sds/tests/changelog/scenarios/message_handling_system/can_add_and_delete_device_with_same_unique_identifier/load_output.json renamed to src/etl/sds/tests/changelog/scenarios/message_handling_system/can_delete_and_add_device_with_same_unique_identifier/load_output.json

File renamed without changes.
