
Commit ce511e3

Merge branch 'master' into VED-717-Readme-VSCode
2 parents 3c3fb85 + 3c684b7


44 files changed (+610, -307 lines)

.github/workflows/continuous-integration.yml

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ jobs:
     if: github.ref == 'refs/heads/master'
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
         with:
           fetch-depth: 0 # This causes all history to be fetched, which is required for calculate-version to function

.github/workflows/deploy-template.yml

Lines changed: 3 additions & 3 deletions
@@ -24,7 +24,7 @@ jobs:
         run: aws sts get-caller-identity
 
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
         with:
          fetch-depth: 1
 
@@ -53,7 +53,7 @@ jobs:
       name: int
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
 
       - uses: aws-actions/configure-aws-credentials@v4
         with:
@@ -85,7 +85,7 @@ jobs:
       contents: read
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
 
       - uses: aws-actions/configure-aws-credentials@v4
         with:

.github/workflows/sonarcloud.yml

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
          fetch-depth: 0

ack_backend/src/convert_message_to_ack_row.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 """Functions for converting the incoming message body into a row of ack data"""
 
 from typing import Union
-from logging_decorators import convert_messsage_to_ack_row_logging_decorator
+from logging_decorators import convert_message_to_ack_row_logging_decorator
 from update_ack_file import create_ack_data
 
 
@@ -19,7 +19,7 @@ def get_error_message_for_ack_file(message_diagnostics) -> Union[None, str]:
     return message_diagnostics.get("error_message", "Unable to determine diagnostics issue")
 
 
-@convert_messsage_to_ack_row_logging_decorator
+@convert_message_to_ack_row_logging_decorator
 def convert_message_to_ack_row(message, created_at_formatted_string):
     """
     Takes a single message and returns the ack data row for that message.

ack_backend/src/logging_decorators.py

Lines changed: 14 additions & 6 deletions
@@ -22,22 +22,29 @@ def send_log_to_firehose(log_data: dict) -> None:
 
 
 def generate_and_send_logs(
-    start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
+    start_time: float,
+    base_log_data: dict,
+    additional_log_data: dict,
+    use_ms_precision: bool = False,
+    is_error_log: bool = False
 ) -> None:
     """Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
     current time and given start_time) and sends them to Cloudwatch and Firehose."""
-    log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
+    seconds_elapsed = time.time() - start_time
+    formatted_time_elapsed = f"{round(seconds_elapsed * 1000, 5)}ms" if use_ms_precision else \
+        f"{round(seconds_elapsed, 5)}s"
+
+    log_data = {**base_log_data, "time_taken": formatted_time_elapsed, **additional_log_data}
     log_function = logger.error if is_error_log else logger.info
     log_function(json.dumps(log_data))
     send_log_to_firehose(log_data)
 
 
-def convert_messsage_to_ack_row_logging_decorator(func):
+def convert_message_to_ack_row_logging_decorator(func):
     """This decorator logs the information on the conversion of a single message to an ack data row"""
 
     @wraps(func)
     def wrapper(message, created_at_formatted_string):
-
         base_log_data = {"function_name": f"ack_processor_{func.__name__}", "date_time": str(datetime.now())}
         start_time = time.time()
 
@@ -57,13 +64,14 @@ def wrapper(message, created_at_formatted_string):
                 "operation_requested": message.get("operation_requested", "unknown"),
                 **process_diagnostics(diagnostics, file_key, message_id),
             }
-            generate_and_send_logs(start_time, base_log_data, additional_log_data)
+            generate_and_send_logs(start_time, base_log_data, additional_log_data, use_ms_precision=True)
 
             return result
 
         except Exception as error:
             additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
-            generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
+            generate_and_send_logs(start_time, base_log_data, additional_log_data, use_ms_precision=True,
+                                   is_error_log=True)
             raise
 
     return wrapper
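
Pieced together from the hunks above, the new timing logic behaves roughly as in the self-contained sketch below. This is a sketch only, assuming what is visible in this diff: the Firehose forwarding via send_log_to_firehose is omitted, and the logger name is an assumption.

import json
import logging
import time

logger = logging.getLogger("ack_processor")  # logger name assumed for this sketch


def generate_and_send_logs(
    start_time: float,
    base_log_data: dict,
    additional_log_data: dict,
    use_ms_precision: bool = False,
    is_error_log: bool = False,
) -> None:
    # Same time formatting and log-level selection as the diff; send_log_to_firehose is omitted here.
    seconds_elapsed = time.time() - start_time
    formatted_time_elapsed = (
        f"{round(seconds_elapsed * 1000, 5)}ms" if use_ms_precision else f"{round(seconds_elapsed, 5)}s"
    )
    log_data = {**base_log_data, "time_taken": formatted_time_elapsed, **additional_log_data}
    log_function = logger.error if is_error_log else logger.info
    log_function(json.dumps(log_data))

The decorator's call sites now pass use_ms_precision=True, so ack-processor timings are logged in milliseconds rather than seconds.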

ack_backend/tests/utils/values_for_ack_backend_tests.py

Lines changed: 2 additions & 2 deletions
@@ -157,7 +157,7 @@ class ValidValues:
         "vaccine_type": MOCK_MESSAGE_DETAILS.vaccine_type,
         "message_id": MOCK_MESSAGE_DETAILS.row_id,
         "operation_requested": "CREATE",
-        "time_taken": "1.0s",
+        "time_taken": "1000.0ms",
         "local_id": MOCK_MESSAGE_DETAILS.local_id,
         "statusCode": 200,
         "diagnostics": "Operation completed successfully",
@@ -234,7 +234,7 @@ class InvalidValues:
         "vaccine_type": "unknown",
         "message_id": "unknown",
         "operation_requested": "unknown",
-        "time_taken": "1.0s",
+        "time_taken": "1000.0ms",
         "local_id": "unknown",
         "statusCode": 500,
         "diagnostics": "An unhandled error occurred during batch processing",
