diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index e496d9ae1..0ce85313a 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,7 +7,7 @@ version: 2
 updates:
   - package-ecosystem: "docker"
     directories:
-      - "/ack_backend"
+      - "/lambdas/ack_backend"
      - "/delta_backend"
      - "/filenameprocessor"
      - "/grafana/non-prod/docker"
@@ -49,7 +49,6 @@ updates:
   - package-ecosystem: "pip"
     directories:
       - "/"
-      - "/ack_backend"
       - "/backend"
       - "/batch_processor_filter"
       - "/delta_backend"
@@ -58,6 +57,7 @@ updates:
       - "/filenameprocessor"
       - "/mesh_processor"
       - "/recordprocessor"
+      - "/lambdas/ack_backend"
       - "/lambdas/redis_sync"
       - "/lambdas/id_sync"
       - "/lambdas/mns_subscription"
diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml
index 2996f3a8f..41e1e9615 100644
--- a/.github/workflows/quality-checks.yml
+++ b/.github/workflows/quality-checks.yml
@@ -106,17 +106,6 @@ jobs:
           poetry run coverage run -m unittest discover -p "*batch*.py" || echo "recordforwarder tests failed" >> ../failed_tests.txt
           poetry run coverage xml -o ../recordforwarder-coverage.xml
 
-      - name: Run unittest with coverage-ack-lambda
-        working-directory: ack_backend
-        id: acklambda
-        env:
-          PYTHONPATH: ${{ github.workspace }}/ack_backend/src:${{ github.workspace }}/ack_backend/tests
-        continue-on-error: true
-        run: |
-          poetry install
-          poetry run coverage run -m unittest discover || echo "ack-lambda tests failed" >> ../failed_tests.txt
-          poetry run coverage xml -o ../ack-lambda-coverage.xml
-
       - name: Run unittest with coverage-delta
         working-directory: delta_backend
         id: delta
@@ -148,6 +137,17 @@ jobs:
           poetry run coverage run -m unittest discover || echo "mesh_processor tests failed" >> ../failed_tests.txt
           poetry run coverage xml -o ../mesh_processor-coverage.xml
 
+      - name: Run unittest with coverage-ack-lambda
+        working-directory: lambdas/ack_backend
+        id: acklambda
+        env:
+          PYTHONPATH: ${{ env.LAMBDA_PATH }}/ack_backend/src:${{ env.LAMBDA_PATH }}/ack_backend/tests
+        continue-on-error: true
+        run: |
+          poetry install
+          poetry run coverage run --source=src -m unittest discover || echo "ack-lambda tests failed" >> ../../failed_tests.txt
+          poetry run coverage xml -o ../../ack-lambda-coverage.xml
+
       - name: Run unittest with coverage-mns-subscription
         working-directory: lambdas/mns_subscription
         id: mns_subscription
diff --git a/Makefile b/Makefile
index 91a930bdb..1247fec75 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
 SHELL=/usr/bin/env bash -euo pipefail
 
-PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor lambdas/redis_sync lambdas/id_sync lambdas/mns_subscription lambdas/shared
+PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor lambdas/ack_backend lambdas/redis_sync lambdas/id_sync lambdas/mns_subscription lambdas/shared
 PYTHON_PROJECT_DIRS = e2e e2e_batch $(PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS)
 
 #Installs dependencies using poetry.
diff --git a/ack_backend/Makefile b/ack_backend/Makefile
deleted file mode 100644
index 90d74aed8..000000000
--- a/ack_backend/Makefile
+++ /dev/null
@@ -1,11 +0,0 @@
-build:
-	docker build -t ack-lambda-build .
-
-package: build
-	mkdir -p build
-	docker run --rm -v $(shell pwd)/build:/build ack-lambda-build
-
-test:
-	@PYTHONPATH=src:tests python -m unittest
-
-.PHONY: build package
diff --git a/ack_backend/src/clients.py b/ack_backend/src/clients.py
deleted file mode 100644
index 380bf3d22..000000000
--- a/ack_backend/src/clients.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""Initialise clients and logger"""
-
-import logging
-from boto3 import client as boto3_client, resource as boto3_resource
-
-REGION_NAME = "eu-west-2"
-
-firehose_client = boto3_client("firehose", region_name=REGION_NAME)
-dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
-
-dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME)
-
-s3_client = None
-def get_s3_client():
-    global s3_client
-    if s3_client is None:
-        s3_client = boto3_client("s3", region_name=REGION_NAME)
-    return s3_client
-
-# Logger
-logging.basicConfig(level="INFO")
-logger = logging.getLogger()
-logger.setLevel("INFO")
diff --git a/ack_backend/src/errors.py b/ack_backend/src/errors.py
deleted file mode 100644
index f892c6ce0..000000000
--- a/ack_backend/src/errors.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Custom exceptions for the Ack lambda."""
-
-
-class UnhandledAuditTableError(Exception):
-    """A custom exception for when an unexpected error occurs whilst adding the file to the audit table."""
diff --git a/immunisation-fhir-api.code-workspace b/immunisation-fhir-api.code-workspace
index 81cb39d91..70e325df8 100644
--- a/immunisation-fhir-api.code-workspace
+++ b/immunisation-fhir-api.code-workspace
@@ -12,9 +12,6 @@
         {
             "path": "recordprocessor"
         },
-        {
-            "path": "ack_backend"
-        },
         {
             "path": "delta_backend"
         },
@@ -27,6 +24,9 @@
         {
             "path": "e2e_batch"
         },
+        {
+            "path": "lambdas/ack_backend"
+        },
         {
             "path": "lambdas/redis_sync"
         },
diff --git a/lambdas/ack_backend/.vscode/settings.json.default b/lambdas/ack_backend/.vscode/settings.json.default
new file mode 100644
index 000000000..71890ff30
--- /dev/null
+++ b/lambdas/ack_backend/.vscode/settings.json.default
@@ -0,0 +1,18 @@
+{
+    "python.analysis.extraPaths": [
+        "./src"
+    ],
+    "python.testing.unittestArgs": [
+        "-v",
+        "-s",
+        "./",
+        "-p",
+        "test_*.py"
+    ],
+    "python.testing.pytestEnabled": false,
+    "python.testing.unittestEnabled": true,
+    "pylint.args": [
+        "--init-hook",
+        "import sys; sys.path.append('./src')"
+    ]
+}
\ No newline at end of file
diff --git a/ack_backend/Dockerfile b/lambdas/ack_backend/Dockerfile
similarity index 59%
rename from ack_backend/Dockerfile
rename to lambdas/ack_backend/Dockerfile
index ebb8b1fa4..ffa2a52da 100644
--- a/ack_backend/Dockerfile
+++ b/lambdas/ack_backend/Dockerfile
@@ -5,12 +5,31 @@ RUN mkdir -p /home/appuser && \
     echo 'appuser:x:1001:' >> /etc/group && \
     chown -R 1001:1001 /home/appuser && pip install "poetry~=2.1.4"
 
-COPY poetry.lock pyproject.toml README.md ./
+# Install Poetry dependencies
+# Copy ack_backend Poetry files
+COPY ./ack_backend/poetry.lock ./ack_backend/pyproject.toml ./
+
+# Install ack_backend dependencies
+WORKDIR /var/task
 RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main
+
 # -----------------------------
 FROM base AS build
-COPY src .
+
+# Set working directory back to Lambda task root
+WORKDIR /var/task
+
+# Copy shared source code
+COPY ./shared/src/common ./common
+
+# Copy ack_backend source code
+COPY ./ack_backend/src .
+
+# Set correct permissions
 RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d)
+
 # Switch to the non-root user for running the container
 USER 1001:1001
+
+# Set the Lambda handler
 CMD ["ack_processor.lambda_handler"]
diff --git a/lambdas/ack_backend/Makefile b/lambdas/ack_backend/Makefile
new file mode 100644
index 000000000..2efb15683
--- /dev/null
+++ b/lambdas/ack_backend/Makefile
@@ -0,0 +1,22 @@
+TEST_ENV := @PYTHONPATH=src:tests:../shared/src
+
+build:
+	docker build -t ack-lambda-build .
+
+package: build
+	mkdir -p build
+	docker run --rm -v $(shell pwd)/build:/build ack-lambda-build
+
+test:
+	$(TEST_ENV) python -m unittest
+
+coverage-run:
+	$(TEST_ENV) coverage run --source=src -m unittest discover -v
+
+coverage-report:
+	$(TEST_ENV) coverage report -m
+
+coverage-html:
+	$(TEST_ENV) coverage html
+
+.PHONY: build package test coverage-run coverage-report coverage-html
diff --git a/ack_backend/README.md b/lambdas/ack_backend/README.md
similarity index 100%
rename from ack_backend/README.md
rename to lambdas/ack_backend/README.md
diff --git a/ack_backend/poetry.lock b/lambdas/ack_backend/poetry.lock
similarity index 100%
rename from ack_backend/poetry.lock
rename to lambdas/ack_backend/poetry.lock
diff --git a/ack_backend/pyproject.toml b/lambdas/ack_backend/pyproject.toml
similarity index 84%
rename from ack_backend/pyproject.toml
rename to lambdas/ack_backend/pyproject.toml
index fbffef2dc..bd9579dd1 100644
--- a/ack_backend/pyproject.toml
+++ b/lambdas/ack_backend/pyproject.toml
@@ -5,7 +5,8 @@ description = ""
 authors = ["Your Name "]
 readme = "README.md"
 packages = [
-    {include = "src"}
+    {include = "src"},
+    {include = "common", from = "../shared/src"}
 ]
 
 [tool.poetry.dependencies]
diff --git a/ack_backend/src/ack_processor.py b/lambdas/ack_backend/src/ack_processor.py
similarity index 100%
rename from ack_backend/src/ack_processor.py
rename to lambdas/ack_backend/src/ack_processor.py
diff --git a/ack_backend/src/audit_table.py b/lambdas/ack_backend/src/audit_table.py
similarity index 91%
rename from ack_backend/src/audit_table.py
rename to lambdas/ack_backend/src/audit_table.py
index 78cf9bf00..48a723d27 100644
--- a/ack_backend/src/audit_table.py
+++ b/lambdas/ack_backend/src/audit_table.py
@@ -1,7 +1,7 @@
 """Add the filename to the audit table and check for duplicates."""
 
-from clients import dynamodb_client, logger
-from errors import UnhandledAuditTableError
+from common.clients import dynamodb_client, logger
+from common.models.errors import UnhandledAuditTableError
 from constants import AUDIT_TABLE_NAME, FileStatus, AuditTableKeys
 
 
diff --git a/ack_backend/src/constants.py b/lambdas/ack_backend/src/constants.py
similarity index 99%
rename from ack_backend/src/constants.py
rename to lambdas/ack_backend/src/constants.py
index b6d18bec6..f6229dbd0 100644
--- a/ack_backend/src/constants.py
+++ b/lambdas/ack_backend/src/constants.py
@@ -4,14 +4,17 @@
 
 AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
 
+
 def get_source_bucket_name() -> str:
     """Get the SOURCE_BUCKET_NAME environment from environment variables."""
     return os.getenv("SOURCE_BUCKET_NAME")
 
+
 def get_ack_bucket_name() -> str:
     """Get the ACK_BUCKET_NAME environment from environment variables."""
     return os.getenv("ACK_BUCKET_NAME")
 
+
 class FileStatus:
     """File status constants"""
 
diff --git a/ack_backend/src/convert_message_to_ack_row.py b/lambdas/ack_backend/src/convert_message_to_ack_row.py
similarity index 93%
rename from ack_backend/src/convert_message_to_ack_row.py
rename to lambdas/ack_backend/src/convert_message_to_ack_row.py
index d81407ade..0ddce6791 100644
--- a/ack_backend/src/convert_message_to_ack_row.py
+++ b/lambdas/ack_backend/src/convert_message_to_ack_row.py
@@ -1,11 +1,10 @@
 """Functions for converting the incoming message body into a row of ack data"""
 
-from typing import Union
 from logging_decorators import convert_message_to_ack_row_logging_decorator
 from update_ack_file import create_ack_data
 
 
-def get_error_message_for_ack_file(message_diagnostics) -> Union[None, str]:
+def get_error_message_for_ack_file(message_diagnostics) -> None | str:
     """Determines and returns the error message to be displayed in the ack file"""
     if message_diagnostics is None:
         return None
diff --git a/ack_backend/src/logging_decorators.py b/lambdas/ack_backend/src/logging_decorators.py
similarity index 64%
rename from ack_backend/src/logging_decorators.py
rename to lambdas/ack_backend/src/logging_decorators.py
index 0790d2ce4..34fbca92b 100644
--- a/ack_backend/src/logging_decorators.py
+++ b/lambdas/ack_backend/src/logging_decorators.py
@@ -1,51 +1,21 @@
 """Decorators for logging and sending logs to Firehose"""
 
 import os
-import json
 import time
 from datetime import datetime
 from functools import wraps
-from clients import firehose_client, logger
-
+from common.log_decorator import generate_and_send_logs
 
+PREFIX = "ack_processor"
 STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose")
 
 
-def send_log_to_firehose(log_data: dict) -> None:
-    """Sends the log_message to Firehose"""
-    try:
-        record = {"Data": json.dumps({"event": log_data}).encode("utf-8")}
-        firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
-        logger.info("Log sent to Firehose")
-    except Exception as error:  # pylint:disable = broad-exception-caught
-        logger.exception("Error sending log to Firehose: %s", error)
-
-
-def generate_and_send_logs(
-    start_time: float,
-    base_log_data: dict,
-    additional_log_data: dict,
-    use_ms_precision: bool = False,
-    is_error_log: bool = False
-) -> None:
-    """Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
-    current time and given start_time) and sends them to Cloudwatch and Firehose."""
-    seconds_elapsed = time.time() - start_time
-    formatted_time_elapsed = f"{round(seconds_elapsed * 1000, 5)}ms" if use_ms_precision else \
-        f"{round(seconds_elapsed, 5)}s"
-
-    log_data = {**base_log_data, "time_taken": formatted_time_elapsed, **additional_log_data}
-    log_function = logger.error if is_error_log else logger.info
-    log_function(json.dumps(log_data))
-    send_log_to_firehose(log_data)
-
-
 def convert_message_to_ack_row_logging_decorator(func):
     """This decorator logs the information on the conversion of a single message to an ack data row"""
 
     @wraps(func)
     def wrapper(message, created_at_formatted_string):
-        base_log_data = {"function_name": f"ack_processor_{func.__name__}", "date_time": str(datetime.now())}
+        base_log_data = {"function_name": f"{PREFIX}_{func.__name__}", "date_time": str(datetime.now())}
         start_time = time.time()
 
         try:
@@ -66,13 +36,13 @@ def wrapper(message, created_at_formatted_string):
                 "operation_requested": message.get("operation_requested", "unknown"),
                 **process_diagnostics(diagnostics, file_key, message_id),
             }
-            generate_and_send_logs(start_time, base_log_data, additional_log_data, use_ms_precision=True)
+            generate_and_send_logs(STREAM_NAME, start_time, base_log_data, additional_log_data, use_ms_precision=True)
 
             return result
 
         except Exception as error:
             additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
-            generate_and_send_logs(start_time, base_log_data, additional_log_data, use_ms_precision=True,
+            generate_and_send_logs(STREAM_NAME, start_time, base_log_data, additional_log_data, use_ms_precision=True,
                                    is_error_log=True)
             raise
 
@@ -85,7 +55,7 @@ def upload_ack_file_logging_decorator(func):
 
     @wraps(func)
     def wrapper(*args, **kwargs):
-        base_log_data = {"function_name": f"ack_processor_{func.__name__}", "date_time": str(datetime.now())}
+        base_log_data = {"function_name": f"{PREFIX}_{func.__name__}", "date_time": str(datetime.now())}
         start_time = time.time()
 
         # NB this doesn't require a try-catch block as the wrapped function never throws an exception
@@ -94,7 +64,7 @@ def wrapper(*args, **kwargs):
         message_for_logs = "Record processing complete"
         base_log_data.update(result)
         additional_log_data = {"status": "success", "statusCode": 200, "message": message_for_logs}
-        generate_and_send_logs(start_time, base_log_data, additional_log_data)
+        generate_and_send_logs(STREAM_NAME, start_time, base_log_data, additional_log_data)
         return result
 
     return wrapper
@@ -106,19 +76,19 @@ def ack_lambda_handler_logging_decorator(func):
 
     @wraps(func)
     def wrapper(event, context, *args, **kwargs):
-        base_log_data = {"function_name": f"ack_processor_{func.__name__}", "date_time": str(datetime.now())}
+        base_log_data = {"function_name": f"{PREFIX}_{func.__name__}", "date_time": str(datetime.now())}
         start_time = time.time()
 
         try:
             result = func(event, context, *args, **kwargs)
             message_for_logs = "Lambda function executed successfully!"
             additional_log_data = {"status": "success", "statusCode": 200, "message": message_for_logs}
-            generate_and_send_logs(start_time, base_log_data, additional_log_data)
+            generate_and_send_logs(STREAM_NAME, start_time, base_log_data, additional_log_data)
             return result
 
         except Exception as error:
             additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
-            generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
+            generate_and_send_logs(STREAM_NAME, start_time, base_log_data, additional_log_data, is_error_log=True)
             raise
 
     return wrapper
diff --git a/ack_backend/src/update_ack_file.py b/lambdas/ack_backend/src/update_ack_file.py
similarity index 98%
rename from ack_backend/src/update_ack_file.py
rename to lambdas/ack_backend/src/update_ack_file.py
index ef72501d2..85d6471d4 100644
--- a/ack_backend/src/update_ack_file.py
+++ b/lambdas/ack_backend/src/update_ack_file.py
@@ -1,13 +1,13 @@
 """Functions for uploading the data to the ack file"""
 
-from io import StringIO, BytesIO
-from typing import Union, Optional
 from botocore.exceptions import ClientError
-from constants import ACK_HEADERS, get_source_bucket_name, get_ack_bucket_name
+from io import StringIO, BytesIO
+from typing import Optional
 from audit_table import change_audit_table_status_to_processed
-from clients import get_s3_client, logger
-from utils_for_ack_lambda import get_row_count
+from common.clients import get_s3_client, logger
+from constants import ACK_HEADERS, get_source_bucket_name, get_ack_bucket_name
 from logging_decorators import upload_ack_file_logging_decorator
+from utils_for_ack_lambda import get_row_count
 
 
 def create_ack_data(
@@ -15,7 +15,7 @@ def create_ack_data(
     local_id: str,
     row_id: str,
     successful_api_response: bool,
-    diagnostics: Union[None, str] = None,
+    diagnostics: None | str = None,
     imms_id: str = None,
 ) -> dict:
     """Returns a dictionary containing the ack headers as keys, along with the relevant values."""
diff --git a/ack_backend/src/utils_for_ack_lambda.py b/lambdas/ack_backend/src/utils_for_ack_lambda.py
similarity index 90%
rename from ack_backend/src/utils_for_ack_lambda.py
rename to lambdas/ack_backend/src/utils_for_ack_lambda.py
index 5f2be9068..29b7d4c66 100644
--- a/ack_backend/src/utils_for_ack_lambda.py
+++ b/lambdas/ack_backend/src/utils_for_ack_lambda.py
@@ -1,6 +1,6 @@
 """Utils for ack lambda"""
 
-from clients import get_s3_client
+from common.clients import get_s3_client
 
 
 def get_row_count(bucket_name: str, file_key: str) -> int:
diff --git a/ack_backend/tests/__init__.py b/lambdas/ack_backend/tests/__init__.py
similarity index 100%
rename from ack_backend/tests/__init__.py
rename to lambdas/ack_backend/tests/__init__.py
diff --git a/ack_backend/tests/test_ack_processor.py b/lambdas/ack_backend/tests/test_ack_processor.py
similarity index 93%
rename from ack_backend/tests/test_ack_processor.py
rename to lambdas/ack_backend/tests/test_ack_processor.py
index 767313908..382cebcc5 100644
--- a/ack_backend/tests/test_ack_processor.py
+++ b/lambdas/ack_backend/tests/test_ack_processor.py
@@ -50,7 +50,7 @@ def setUp(self) -> None:
             Key=f"processing/{MOCK_MESSAGE_DETAILS.file_key}",
             Body=mock_source_file_with_100_rows.getvalue(),
         )
-        self.logger_info_patcher = patch('logging_decorators.logger.info')
+        self.logger_info_patcher = patch('common.log_decorator.logger.info')
         self.mock_logger_info = self.logger_info_patcher.start()
 
     def tearDown(self) -> None:
@@ -113,8 +113,13 @@ def test_lambda_handler_main_multiple_records(self):
 
         response = lambda_handler(event=event, context={})
         self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
-        validate_ack_file_content(self.s3_client,
-            [*array_of_success_messages, *array_of_failure_messages, *array_of_mixed_success_and_failure_messages],
+        validate_ack_file_content(
+            self.s3_client,
+            [
+                *array_of_success_messages,
+                *array_of_failure_messages,
+                *array_of_mixed_success_and_failure_messages
+            ],
             existing_file_content=ValidValues.ack_headers,
         )
 
@@ -169,7 +174,10 @@ def test_lambda_handler_main(self):
             # TODO: None of the test cases have any existing ack file content?
             with self.subTest(msg=f"Existing ack file: {test_case['description']}"):
                 existing_ack_file_content = test_case.get("existing_ack_file_content", "")
-                setup_existing_ack_file(MOCK_MESSAGE_DETAILS.temp_ack_file_key, existing_ack_file_content, self.s3_client)
+                setup_existing_ack_file(
+                    MOCK_MESSAGE_DETAILS.temp_ack_file_key,
+                    existing_ack_file_content, self.s3_client
+                )
                 response = lambda_handler(event=self.generate_event(test_case["messages"]), context={})
                 self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
                 validate_ack_file_content(self.s3_client, test_case["messages"], existing_ack_file_content)
@@ -194,10 +202,10 @@ def test_lambda_handler_error_scenarios(self):
 
         for test_case in test_cases:
             with self.subTest(msg=test_case["description"]):
-                with patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose:
+                with patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose:
                     with self.assertRaises(Exception):
                         lambda_handler(event=test_case["event"], context={})
-                    error_log = mock_send_log_to_firehose.call_args[0][0]
+                    error_log = mock_send_log_to_firehose.call_args[0][1]
                     self.assertIn(test_case["expected_message"], error_log["diagnostics"])
 
 
diff --git a/ack_backend/tests/test_audit_table.py b/lambdas/ack_backend/tests/test_audit_table.py
similarity index 93%
rename from ack_backend/tests/test_audit_table.py
rename to lambdas/ack_backend/tests/test_audit_table.py
index a918a545e..76c2a0a8b 100644
--- a/ack_backend/tests/test_audit_table.py
+++ b/lambdas/ack_backend/tests/test_audit_table.py
@@ -1,7 +1,8 @@
 import unittest
-from unittest.mock import patch, MagicMock
+from unittest.mock import patch
 import audit_table
-from errors import UnhandledAuditTableError
+from common.models.errors import UnhandledAuditTableError
+
 
 
 class TestAuditTable(unittest.TestCase):
diff --git a/ack_backend/tests/test_convert_message_to_ack_row.py b/lambdas/ack_backend/tests/test_convert_message_to_ack_row.py
similarity index 100%
rename from ack_backend/tests/test_convert_message_to_ack_row.py
rename to lambdas/ack_backend/tests/test_convert_message_to_ack_row.py
diff --git a/ack_backend/tests/test_logging_decorators.py b/lambdas/ack_backend/tests/test_logging_decorators.py
similarity index 70%
rename from ack_backend/tests/test_logging_decorators.py
rename to lambdas/ack_backend/tests/test_logging_decorators.py
index ee410b91e..8a0c01041 100644
--- a/ack_backend/tests/test_logging_decorators.py
+++ b/lambdas/ack_backend/tests/test_logging_decorators.py
@@ -1,50 +1,20 @@
 import unittest
-from unittest.mock import patch, MagicMock, call
+from unittest.mock import patch
 import logging_decorators
-import json
-import time
+
 
 class TestLoggingDecorators(unittest.TestCase):
 
     def setUp(self):
         # Patch logger and firehose_client
-        self.logger_patcher = patch('logging_decorators.logger')
+        self.logger_patcher = patch('common.log_decorator.logger')
         self.mock_logger = self.logger_patcher.start()
-        self.firehose_patcher = patch('logging_decorators.firehose_client')
+        self.firehose_patcher = patch('common.log_decorator.firehose_client')
         self.mock_firehose = self.firehose_patcher.start()
 
     def tearDown(self):
         self.logger_patcher.stop()
         self.firehose_patcher.stop()
 
-    def test_send_log_to_firehose_success(self):
-        log_data = {"foo": "bar"}
-        logging_decorators.send_log_to_firehose(log_data)
-        self.mock_firehose.put_record.assert_called_once()
-        self.mock_logger.info.assert_called_with("Log sent to Firehose")
-
-    def test_send_log_to_firehose_exception(self):
-        self.mock_firehose.put_record.side_effect = Exception("fail!")
-        log_data = {"foo": "bar"}
-        logging_decorators.send_log_to_firehose(log_data)
-        self.mock_logger.exception.assert_called()
-        self.assertIn("Error sending log to Firehose", self.mock_logger.exception.call_args[0][0])
-
-    def test_generate_and_send_logs_info(self):
-        start_time = time.time() - 1
-        base_log_data = {"base": "data"}
-        additional_log_data = {"extra": "info"}
-        logging_decorators.generate_and_send_logs(start_time, base_log_data, additional_log_data)
-        self.mock_logger.info.assert_called()
-        self.mock_firehose.put_record.assert_called_once()
-
-    def test_generate_and_send_logs_error(self):
-        start_time = time.time() - 1
-        base_log_data = {"base": "data"}
-        additional_log_data = {"extra": "info"}
-        logging_decorators.generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
-        self.mock_logger.error.assert_called()
-        self.mock_firehose.put_record.assert_called_once()
-
     def test_process_diagnostics_dict(self):
         diagnostics = {"statusCode": 400, "error_message": "bad request"}
         result = logging_decorators.process_diagnostics(diagnostics, "file.csv", "msg-1")
@@ -127,5 +97,6 @@ def dummy_lambda(event, context):
         self.mock_logger.error.assert_called()
         self.mock_firehose.put_record.assert_called()
 
+
 if __name__ == "__main__":
-    unittest.main()
\ No newline at end of file
+    unittest.main()
diff --git a/ack_backend/tests/test_splunk_logging.py b/lambdas/ack_backend/tests/test_splunk_logging.py
similarity index 81%
rename from ack_backend/tests/test_splunk_logging.py
rename to lambdas/ack_backend/tests/test_splunk_logging.py
index 7172d4e28..40edbe906 100644
--- a/ack_backend/tests/test_splunk_logging.py
+++ b/lambdas/ack_backend/tests/test_splunk_logging.py
@@ -6,6 +6,7 @@
 from contextlib import ExitStack
 from moto import mock_s3
 from boto3 import client as boto3_client
+from common.log_decorator import generate_and_send_logs, send_log_to_firehose
 
 from tests.utils.values_for_ack_backend_tests import (
     ValidValues,
@@ -29,6 +30,7 @@ class TestLoggingDecorators(unittest.TestCase):
     def setUp(self):
         self.s3_client = boto3_client("s3", region_name="eu-west-2")
         GenericSetUp(self.s3_client)
+        self.stream_name = MOCK_ENVIRONMENT_DICT["FIREHOSE_STREAM_NAME"]
 
         # MOCK SOURCE FILE WITH 100 ROWS TO SIMULATE THE SCENARIO WHERE THE ACK FILE IS NO FULL.
         # TODO: Test all other scenarios.
@@ -104,10 +106,10 @@ def test_splunk_logging_successful_rows(self):
         """Tests a single object in the body of the event"""
 
         for operation in ["CREATE", "UPDATE", "DELETE"]:
-            with (
-                patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-                patch("logging_decorators.logger") as mock_logger,
-            ):
+            with (  # noqa: E999
+                patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+                patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+            ):  # noqa: E999
                 result = lambda_handler(event=generate_event([{"operation_requested": operation}]), context={})
 
                 self.assertEqual(result, {"statusCode": 200, "body": json.dumps("Lambda function executed successfully!")})
@@ -126,23 +128,27 @@ def test_splunk_logging_successful_rows(self):
             self.assertEqual(second_logger_info_call_args, expected_second_logger_info_data)
 
             mock_send_log_to_firehose.assert_has_calls(
-                [call(expected_first_logger_info_data), call(expected_second_logger_info_data)]
+                [
+                    call(self.stream_name, expected_first_logger_info_data),
+                    call(self.stream_name, expected_second_logger_info_data)
+                ]
             )
 
     def test_splunk_logging_missing_data(self):
         """Tests missing key values in the body of the event"""
 
-        with (
-            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-            patch("logging_decorators.logger") as mock_logger,
-        ):
+        with (  # noqa: E999
+            patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+            patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+        ):  # noqa: E999
             with self.assertRaises(Exception):
                 lambda_handler(event={"Records": [{"body": json.dumps([{"": "456"}])}]}, context={})
 
-            expected_first_logger_info_data = {**InvalidValues.Logging_with_no_values}
+            expected_first_logger_info_data = {**InvalidValues.logging_with_no_values}
 
             expected_first_logger_error_data = self.expected_lambda_handler_logs(
-                success=False, number_of_rows=1, ingestion_complete=False, diagnostics="'NoneType' object has no attribute 'replace'"
+                success=False, number_of_rows=1, ingestion_complete=False,
+                diagnostics="'NoneType' object has no attribute 'replace'"
             )
 
             first_logger_info_call_args = json.loads(self.extract_all_call_args_for_logger_info(mock_logger)[0])
@@ -151,11 +157,13 @@ def test_splunk_logging_missing_data(self):
             self.assertEqual(first_logger_error_call_args, expected_first_logger_error_data)
 
             self.assertEqual(
-                mock_send_log_to_firehose.call_args_list,
-                [call(expected_first_logger_info_data), call(expected_first_logger_error_data)],
+                mock_send_log_to_firehose.call_args_list, [
+                    call(self.stream_name, expected_first_logger_info_data),
+                    call(self.stream_name, expected_first_logger_error_data)
+                ],
             )
 
-    @patch("logging_decorators.send_log_to_firehose")
+    @patch("common.log_decorator.send_log_to_firehose")
     def test_splunk_logging_statuscode_diagnostics(
         self,
         mock_send_log_to_firehose,
@@ -171,10 +179,10 @@ def test_splunk_logging_statuscode_diagnostics(
         ]
 
         for test_case in test_cases:
-            with (
-                patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-                patch("logging_decorators.logger") as mock_logger,
-            ):
+            with (  # noqa: E999
+                patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+                patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+            ):  # noqa: E999
                 result = lambda_handler(event=generate_event([{"diagnostics": test_case["diagnostics"]}]), context={})
 
                 self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
@@ -195,17 +203,20 @@ def test_splunk_logging_statuscode_diagnostics(
             self.assertEqual(second_logger_info_call_args, expected_second_logger_info_data)
 
             mock_send_log_to_firehose.assert_has_calls(
-                [call(expected_first_logger_info_data), call(expected_second_logger_info_data)]
+                [
+                    call(self.stream_name, expected_first_logger_info_data),
+                    call(self.stream_name, expected_second_logger_info_data)
+                ]
             )
 
     def test_splunk_logging_multiple_rows(self):
         """Tests logging for multiple objects in the body of the event"""
 
         messages = [{"row_id": "test1"}, {"row_id": "test2"}]
-        with (
-            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-            patch("logging_decorators.logger") as mock_logger,
-        ):
+        with (  # noqa: E999
+            patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+            patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+        ):  # noqa: E999
             result = lambda_handler(generate_event(messages), context={})
 
             self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
@@ -226,13 +237,13 @@ def test_splunk_logging_multiple_rows(self):
 
             mock_send_log_to_firehose.assert_has_calls(
                 [
-                    call(expected_first_logger_info_data),
-                    call(expected_second_logger_info_data),
-                    call(expected_third_logger_info_data),
+                    call(self.stream_name, expected_first_logger_info_data),
+                    call(self.stream_name, expected_second_logger_info_data),
+                    call(self.stream_name, expected_third_logger_info_data),
                 ]
             )
 
-    @patch("logging_decorators.send_log_to_firehose")
+    @patch("common.log_decorator.send_log_to_firehose")
     def test_splunk_logging_multiple_with_diagnostics(
         self,
         mock_send_log_to_firehose,
@@ -252,10 +263,10 @@ def test_splunk_logging_multiple_with_diagnostics(
             {"row_id": "test3", "operation_requested": "DELETE", "diagnostics": DiagnosticsDictionaries.NO_PERMISSIONS},
         ]
 
-        with (
-            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-            patch("logging_decorators.logger") as mock_logger,
-        ):
+        with (  # noqa: E999
+            patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+            patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+        ):  # noqa: E999
             result = lambda_handler(generate_event(messages), context={})
 
             self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
@@ -301,10 +312,10 @@ def test_splunk_logging_multiple_with_diagnostics(
 
             mock_send_log_to_firehose.assert_has_calls(
                 [
-                    call(expected_first_logger_info_data),
-                    call(expected_second_logger_info_data),
-                    call(expected_third_logger_info_data),
-                    call(expected_fourth_logger_info_data),
+                    call(self.stream_name, expected_first_logger_info_data),
+                    call(self.stream_name, expected_second_logger_info_data),
+                    call(self.stream_name, expected_third_logger_info_data),
+                    call(self.stream_name, expected_fourth_logger_info_data),
                 ]
             )
 
@@ -317,11 +328,12 @@ def test_splunk_update_ack_file_not_logged(self):
             message_value = "test" + str(i)
             messages.append({"row_id": message_value})
 
-        with (
-            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-            patch("logging_decorators.logger") as mock_logger,
-            patch("update_ack_file.change_audit_table_status_to_processed") as mock_change_audit_table_status_to_processed,
-        ):
+        with (  # noqa: E999
+            patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+            patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+            patch("update_ack_file.change_audit_table_status_to_processed")
+            as mock_change_audit_table_status_to_processed,  # noqa: E999
+        ):  # noqa: E999
             result = lambda_handler(generate_event(messages), context={})
 
             self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
@@ -341,13 +353,12 @@ def test_splunk_update_ack_file_not_logged(self):
 
             mock_send_log_to_firehose.assert_has_calls(
                 [
-                    call(secondlast_logger_info_call_args),
-                    call(last_logger_info_call_args),
+                    call(self.stream_name, secondlast_logger_info_call_args),
+                    call(self.stream_name, last_logger_info_call_args),
                 ]
             )
             mock_change_audit_table_status_to_processed.assert_not_called()
 
-
     def test_splunk_update_ack_file_logged(self):
         """Tests that update_ack_file is logged if we have sent acks for the whole file"""
         # send 99 messages
@@ -356,11 +367,12 @@ def test_splunk_update_ack_file_logged(self):
             message_value = "test" + str(i)
             messages.append({"row_id": message_value})
 
-        with (
-            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
-            patch("logging_decorators.logger") as mock_logger,
-            patch("update_ack_file.change_audit_table_status_to_processed") as mock_change_audit_table_status_to_processed,
-        ):
+        with (  # noqa: E999
+            patch("common.log_decorator.send_log_to_firehose") as mock_send_log_to_firehose,  # noqa: E999
+            patch("common.log_decorator.logger") as mock_logger,  # noqa: E999
+            patch("update_ack_file.change_audit_table_status_to_processed")
+            as mock_change_audit_table_status_to_processed,  # noqa: E999
+        ):  # noqa: E999
             result = lambda_handler(generate_event(messages), context={})
 
             self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
@@ -374,7 +386,9 @@ def test_splunk_update_ack_file_logged(self):
                 "message_id": "test1",
                 "time_taken": "1.0s"
             }
-            expected_last_logger_info_data = self.expected_lambda_handler_logs(success=True, number_of_rows=99, ingestion_complete=True)
+            expected_last_logger_info_data = self.expected_lambda_handler_logs(
+                success=True, number_of_rows=99, ingestion_complete=True
+            )
 
             all_logger_info_call_args = self.extract_all_call_args_for_logger_info(mock_logger)
             thirdlast_logger_info_call_args = json.loads(all_logger_info_call_args[98])
@@ -386,9 +400,9 @@ def test_splunk_update_ack_file_logged(self):
 
             mock_send_log_to_firehose.assert_has_calls(
                 [
-                    call(thirdlast_logger_info_call_args),
-                    call(secondlast_logger_info_call_args),
-                    call(last_logger_info_call_args),
+                    call(self.stream_name, thirdlast_logger_info_call_args),
+                    call(self.stream_name, secondlast_logger_info_call_args),
+                    call(self.stream_name, last_logger_info_call_args),
                 ]
             )
             mock_change_audit_table_status_to_processed.assert_called()
diff --git a/ack_backend/tests/test_update_ack_file.py b/lambdas/ack_backend/tests/test_update_ack_file.py
similarity index 99%
rename from ack_backend/tests/test_update_ack_file.py
rename to lambdas/ack_backend/tests/test_update_ack_file.py
index 9fc87c5f0..12f8b27d4 100644
--- a/ack_backend/tests/test_update_ack_file.py
+++ b/lambdas/ack_backend/tests/test_update_ack_file.py
@@ -207,5 +207,6 @@ def test_obtain_current_ack_content_file_exists(self):
         result = obtain_current_ack_content(MOCK_MESSAGE_DETAILS.temp_ack_file_key)
         self.assertEqual(result.getvalue(), existing_content)
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/ack_backend/tests/test_update_ack_file_flow.py b/lambdas/ack_backend/tests/test_update_ack_file_flow.py
similarity index 96%
rename from ack_backend/tests/test_update_ack_file_flow.py
rename to lambdas/ack_backend/tests/test_update_ack_file_flow.py
index a36a6e3a6..9bb3cd75a 100644
--- a/ack_backend/tests/test_update_ack_file_flow.py
+++ b/lambdas/ack_backend/tests/test_update_ack_file_flow.py
@@ -19,7 +19,10 @@ def setUp(self):
         self.ack_bucket_patcher = patch('update_ack_file.get_ack_bucket_name', return_value=self.ack_bucket_name)
         self.mock_get_ack_bucket_name = self.ack_bucket_patcher.start()
 
-        self.source_bucket_patcher = patch('update_ack_file.get_source_bucket_name', return_value=self.source_bucket_name)
+        self.source_bucket_patcher = patch(
+            'update_ack_file.get_source_bucket_name',
+            return_value=self.source_bucket_name
+        )
         self.mock_get_source_bucket_name = self.source_bucket_patcher.start()
 
         self.s3_client.create_bucket(
diff --git a/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py b/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
similarity index 100%
rename from ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
rename to lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
diff --git a/ack_backend/tests/utils/mock_environment_variables.py b/lambdas/ack_backend/tests/utils/mock_environment_variables.py
similarity index 100%
rename from ack_backend/tests/utils/mock_environment_variables.py
rename to lambdas/ack_backend/tests/utils/mock_environment_variables.py
diff --git a/ack_backend/tests/utils/utils_for_ack_backend_tests.py b/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py
similarity index 96%
rename from ack_backend/tests/utils/utils_for_ack_backend_tests.py
rename to lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py
index 5dfb60917..542fb8272 100644
--- a/ack_backend/tests/utils/utils_for_ack_backend_tests.py
+++ b/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py
@@ -92,8 +92,10 @@ def generate_expected_ack_content(
     return existing_content
 
 
-def validate_ack_file_content(s3_client,
-    incoming_messages: list[dict], existing_file_content: str = ValidValues.ack_headers
+def validate_ack_file_content(
+    s3_client,
+    incoming_messages: list[dict],
+    existing_file_content: str = ValidValues.ack_headers
 ) -> None:
     """
     Obtains the ack file content and ensures that it matches the expected content (expected content is based
diff --git a/ack_backend/tests/utils/values_for_ack_backend_tests.py b/lambdas/ack_backend/tests/utils/values_for_ack_backend_tests.py
similarity index 99%
rename from ack_backend/tests/utils/values_for_ack_backend_tests.py
rename to lambdas/ack_backend/tests/utils/values_for_ack_backend_tests.py
index ce36f7e6f..346f57351 100644
--- a/ack_backend/tests/utils/values_for_ack_backend_tests.py
+++ b/lambdas/ack_backend/tests/utils/values_for_ack_backend_tests.py
@@ -246,13 +246,12 @@ class ValidValues:
     }
 
 
-
 class InvalidValues:
     """Invalid values for use in tests"""
 
     fixed_datetime = datetime(2024, 10, 29, 12, 0, 0)
 
-    Logging_with_no_values = {
+    logging_with_no_values = {
         "function_name": "ack_processor_convert_message_to_ack_row",
         "date_time": fixed_datetime.strftime("%Y-%m-%d %H:%M:%S"),
         "operation_start_time": "unknown",
diff --git a/lambdas/id_sync/Dockerfile b/lambdas/id_sync/Dockerfile
index af5c80164..49db8e7e2 100644
--- a/lambdas/id_sync/Dockerfile
+++ b/lambdas/id_sync/Dockerfile
@@ -9,9 +9,6 @@ RUN mkdir -p /home/appuser && \
 # Install Poetry dependencies
 # Copy id_sync Poetry files
 COPY ./id_sync/poetry.lock ./id_sync/pyproject.toml ./
-COPY ./shared/src/common ./src/common
-
-RUN echo "Listing /var/task after source code copy:" && ls -R /var/task
 
 # Install id_sync dependencies
 WORKDIR /var/task
diff --git a/lambdas/redis_sync/Dockerfile b/lambdas/redis_sync/Dockerfile
index 9e9b5bdf5..fdfb1c6da 100644
--- a/lambdas/redis_sync/Dockerfile
+++ b/lambdas/redis_sync/Dockerfile
@@ -9,9 +9,6 @@ RUN mkdir -p /home/appuser && \
 # Install Poetry dependencies
 # Copy redis_sync Poetry files
 COPY ./redis_sync/poetry.lock ./redis_sync/pyproject.toml ./
-COPY ./shared/src/common ./src/common
-
-RUN echo "Listing /var/task after source code copy:" && ls -R /var/task
 
 # Install redis_sync dependencies
 WORKDIR /var/task
diff --git a/lambdas/shared/.coverage b/lambdas/shared/.coverage
deleted file mode 100644
index 96af55847..000000000
Binary files a/lambdas/shared/.coverage and /dev/null differ
diff --git a/lambdas/shared/src/common/clients.py b/lambdas/shared/src/common/clients.py
index 501418c12..e6a2bde62 100644
--- a/lambdas/shared/src/common/clients.py
+++ b/lambdas/shared/src/common/clients.py
@@ -12,8 +12,19 @@
 REGION_NAME = os.getenv("AWS_REGION", "eu-west-2")
 
 s3_client = boto3_client("s3", region_name=REGION_NAME)
-firehose_client = boto3_client("firehose", region_name=REGION_NAME)
+# for lambdas which require a global s3_client
+global_s3_client = None
+
+
+def get_s3_client():
+    global global_s3_client
+    if global_s3_client is None:
+        global_s3_client = boto3_client("s3", region_name=REGION_NAME)
+    return global_s3_client
+
+
+firehose_client = boto3_client("firehose", region_name=REGION_NAME)
 secrets_manager_client = boto3_client("secretsmanager", region_name=REGION_NAME)
-dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME)
 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
+dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME)
 
diff --git a/lambdas/shared/src/common/log_decorator.py b/lambdas/shared/src/common/log_decorator.py
index ba35d6e9e..e549978d2 100644
--- a/lambdas/shared/src/common/log_decorator.py
+++ b/lambdas/shared/src/common/log_decorator.py
@@ -21,11 +21,16 @@ def send_log_to_firehose(stream_name, log_data: dict) -> None:
 
 
 def generate_and_send_logs(stream_name,
-                           start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
+                           start_time, base_log_data: dict, additional_log_data: dict,
+                           use_ms_precision: bool = False, is_error_log: bool = False
                            ) -> None:
     """Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
     current time and given start_time) and sends them to Cloudwatch and Firehose."""
-    log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
+    seconds_elapsed = time.time() - start_time
+    formatted_time_elapsed = f"{round(seconds_elapsed * 1000, 5)}ms" if use_ms_precision else \
+        f"{round(seconds_elapsed, 5)}s"
+
+    log_data = {**base_log_data, "time_taken": formatted_time_elapsed, **additional_log_data}
     log_function = logger.error if is_error_log else logger.info
     log_function(json.dumps(log_data))
     send_log_to_firehose(stream_name, log_data)
diff --git a/lambdas/shared/src/common/models/errors.py b/lambdas/shared/src/common/models/errors.py
index b926e1d44..7803c21e1 100644
--- a/lambdas/shared/src/common/models/errors.py
+++ b/lambdas/shared/src/common/models/errors.py
@@ -1,7 +1,6 @@
 import uuid
 from dataclasses import dataclass
 from enum import Enum
-from typing import Union
 
 
 class Severity(str, Enum):
@@ -160,7 +159,7 @@ def to_operation_outcome(self) -> dict:
 class UnhandledResponseError(RuntimeError):
     """Use this error when the response from an external service (ex: dynamodb) can't be handled"""
 
-    response: Union[dict, str]
+    response: dict | str
     message: str
 
     def __str__(self):
@@ -204,6 +203,10 @@ def to_operation_outcome(self) -> dict:
         pass
 
 
+class UnhandledAuditTableError(Exception):
+    """A custom exception for when an unexpected error occurs whilst adding the file to the audit table."""
+
+
 @dataclass
 class InvalidPatientId(ValidationError):
     """Use this when NHS Number is invalid or doesn't exist"""
diff --git a/lambdas/shared/tests/test_common/test_cache.py b/lambdas/shared/tests/test_common/test_cache.py
index 4ce4c0cf3..3e3a2bd19 100644
--- a/lambdas/shared/tests/test_common/test_cache.py
+++ b/lambdas/shared/tests/test_common/test_cache.py
@@ -1,8 +1,9 @@
 import json
+import os
 import tempfile
 import unittest
 
-from common.cache import Cache
+from src.common.cache import Cache
 
 
 class TestCache(unittest.TestCase):
@@ -50,6 +51,11 @@ def test_delete(self):
         value = self.cache.get(key)
         self.assertIsNone(value)
 
+    def test_delete_key_not_found(self):
+        """it should return None gracefully if key doesn't exist"""
+        value = self.cache.delete("it-does-not-exist")
+        self.assertIsNone(value)
+
     def test_write_to_file(self):
         """it should update the cache file"""
         value = {"foo": "a-long-foo-so-to-make-sure-truncate-is-working", "bar": 42}
@@ -67,3 +73,16 @@ def test_write_to_file(self):
         with open(self.cache.cache_file.name, "r") as stored:
             content = json.loads(stored.read())
         self.assertDictEqual(content[key], new_value)
+
+    def test_cache_create_empty(self):
+        """it should gracefully create an empty cache"""
+        filename = f"{tempfile.gettempdir()}/cache.json"
+        os.remove(filename)
+
+        # When
+        self.cache = Cache(tempfile.gettempdir())
+
+        # Then
+        with open(self.cache.cache_file.name, "r") as stored:
+            content = stored.read()
+        self.assertEqual(len(content), 0)
diff --git a/lambdas/shared/tests/test_common/test_clients.py b/lambdas/shared/tests/test_common/test_clients.py
index d9680a047..f5f4f6032 100644
--- a/lambdas/shared/tests/test_common/test_clients.py
+++ b/lambdas/shared/tests/test_common/test_clients.py
@@ -59,3 +59,17 @@ def test_logger_is_initialized(self):
     def test_logger_set_level(self):
         """Test that logger level is set to INFO"""
         self.mock_logger_instance.setLevel.assert_called_once_with(logging.INFO)
+
+    def test_global_s3_client(self):
+        ''' Test global_s3_client is not initialized on import '''
+        importlib.reload(clients)
+        self.assertEqual(clients.global_s3_client, None)
+
+    def test_global_s3_client_initialization(self):
+        ''' Test global_s3_client is initialized exactly once even with multiple invocations'''
+        importlib.reload(clients)
+        clients.get_s3_client()
+        self.assertNotEqual(clients.global_s3_client, None)
+        call_count = self.mock_boto3_client.call_count
+        clients.get_s3_client()
+        self.assertEqual(self.mock_boto3_client.call_count, call_count)
diff --git a/lambdas/shared/tests/test_common/test_errors.py b/lambdas/shared/tests/test_common/test_errors.py
new file mode 100644
index 000000000..778addd21
--- /dev/null
+++ b/lambdas/shared/tests/test_common/test_errors.py
@@ -0,0 +1,362 @@
+import unittest
+from unittest.mock import patch
+import src.common.models.errors as errors
+
+
+class TestErrors(unittest.TestCase):
+
+    def setUp(self):
+        TEST_UUID = "01234567-89ab-cdef-0123-4567890abcde"
+        # Patch uuid4
+        self.uuid4_patch = patch("uuid.uuid4", return_value=TEST_UUID)
+        self.mock_uuid4 = self.uuid4_patch.start()
+        self.addCleanup(self.uuid4_patch.stop)
+
+    def assert_response_message(self, context, response, message):
self.assertEqual(context.exception.response, response) + self.assertEqual(context.exception.message, message) + + def assert_resource_type_and_id(self, context, resource_type, resource_id): + self.assertEqual(context.exception.resource_type, resource_type) + self.assertEqual(context.exception.resource_id, resource_id) + + def assert_operation_outcome(self, outcome): + self.assertEqual(outcome.get('resourceType'), "OperationOutcome") + + def test_errors_unauthorized_error(self): + """Test correct operation of UnauthorizedError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.UnauthorizedError) as context: + raise errors.UnauthorizedError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.forbidden) + self.assertEqual(issue.get('diagnostics'), "Unauthorized request") + + def test_errors_unauthorized_vax_error(self): + """Test correct operation of UnauthorizedVaxError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.UnauthorizedVaxError) as context: + raise errors.UnauthorizedVaxError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.forbidden) + self.assertEqual(issue.get('diagnostics'), "Unauthorized request for vaccine type") + + def test_errors_unauthorized_vax_on_record_error(self): + """Test correct operation of UnauthorizedVaxOnRecordError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.UnauthorizedVaxOnRecordError) as context: + raise errors.UnauthorizedVaxOnRecordError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.forbidden) + self.assertEqual( + issue.get('diagnostics'), + "Unauthorized request for vaccine type present in the stored immunization resource" + ) + + def test_errors_token_validation_error(self): + """Test correct operation of TokenValidationError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.TokenValidationError) as context: + raise errors.TokenValidationError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), 
errors.Code.invalid) + self.assertEqual(issue.get('diagnostics'), "Missing/Invalid Token") + + def test_errors_conflict_error(self): + """Test correct operation of ConflictError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.ConflictError) as context: + raise errors.ConflictError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.duplicate) + self.assertEqual(issue.get('diagnostics'), "Conflict") + + def test_errors_resource_not_found_error(self): + """Test correct operation of ResourceNotFoundError""" + test_resource_type = "test_resource_type" + test_resource_id = "test_resource_id" + + with self.assertRaises(errors.ResourceNotFoundError) as context: + raise errors.ResourceNotFoundError(test_resource_type, test_resource_id) + self.assert_resource_type_and_id(context, test_resource_type, test_resource_id) + self.assertEqual( + str(context.exception), + f"{test_resource_type} resource does not exist. ID: {test_resource_id}" + ) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.not_found) + self.assertEqual( + issue.get('diagnostics'), + f"{test_resource_type} resource does not exist. ID: {test_resource_id}" + ) + + def test_errors_resource_found_error(self): + """Test correct operation of ResourceFoundError""" + test_resource_type = "test_resource_type" + test_resource_id = "test_resource_id" + + with self.assertRaises(errors.ResourceFoundError) as context: + raise errors.ResourceFoundError(test_resource_type, test_resource_id) + self.assert_resource_type_and_id(context, test_resource_type, test_resource_id) + self.assertEqual( + str(context.exception), + f"{test_resource_type} resource does exist. ID: {test_resource_id}" + ) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.not_found) + self.assertEqual( + issue.get('diagnostics'), + f"{test_resource_type} resource does exist. 
ID: {test_resource_id}" + ) + + def test_errors_unhandled_response_error(self): + """Test correct operation of UnhandledResponseError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.UnhandledResponseError) as context: + raise errors.UnhandledResponseError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.exception) + self.assertEqual(issue.get('diagnostics'), f"{test_message}\n{test_response}") + + def test_errors_bad_request_error(self): + """Test correct operation of BadRequestError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.BadRequestError) as context: + raise errors.BadRequestError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.incomplete) + self.assertEqual(issue.get('diagnostics'), f"{test_message}\n{test_response}") + + def test_errors_mandatory_error(self): + """Test correct operation of MandatoryError""" + test_message = "test_message" + + with self.assertRaises(errors.MandatoryError) as context: + raise errors.MandatoryError(test_message) + self.assertEqual(str(context.exception.message), test_message) + + def test_errors_mandatory_error_no_message(self): + """Test correct operation of MandatoryError with no message""" + + with self.assertRaises(errors.MandatoryError) as context: + raise errors.MandatoryError() + self.assertIsNone(context.exception.message) + + def test_errors_validation_error(self): + """Test correct operation of ValidationError""" + with self.assertRaises(errors.ValidationError) as context: + raise errors.ValidationError() + outcome = context.exception.to_operation_outcome() + self.assertIsNone(outcome) + + def test_errors_invalid_patient_id(self): + """Test correct operation of InvalidPatientId""" + test_patient_identifier = "test_patient_identifier" + + with self.assertRaises(errors.InvalidPatientId) as context: + raise errors.InvalidPatientId(test_patient_identifier) + self.assertEqual(context.exception.patient_identifier, test_patient_identifier) + self.assertEqual( + str(context.exception), + f"NHS Number: {test_patient_identifier} is invalid or it doesn't exist." + ) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.exception) + self.assertEqual( + issue.get('diagnostics'), + f"NHS Number: {test_patient_identifier} is invalid or it doesn't exist." 
+ ) + + def test_errors_inconsistent_id_error(self): + """Test correct operation of InconsistentIdError""" + test_imms_id = "test_imms_id" + + with self.assertRaises(errors.InconsistentIdError) as context: + raise errors.InconsistentIdError(test_imms_id) + self.assertEqual(context.exception.imms_id, test_imms_id) + self.assertEqual( + str(context.exception), + f"The provided id:{test_imms_id} doesn't match with the content of the message" + ) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.exception) + self.assertEqual( + issue.get('diagnostics'), + f"The provided id:{test_imms_id} doesn't match with the content of the message" + ) + + def test_errors_custom_validation_error(self): + """Test correct operation of CustomValidationError""" + test_message = "test_message" + + with self.assertRaises(errors.CustomValidationError) as context: + raise errors.CustomValidationError(test_message) + self.assertEqual(context.exception.message, test_message) + self.assertEqual(str(context.exception), test_message) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.invariant) + self.assertEqual(issue.get('diagnostics'), test_message) + + def test_errors_identifier_duplication_error(self): + """Test correct operation of IdentifierDuplicationError""" + test_identifier = "test_identifier" + + with self.assertRaises(errors.IdentifierDuplicationError) as context: + raise errors.IdentifierDuplicationError(test_identifier) + self.assertEqual(context.exception.identifier, test_identifier) + self.assertEqual( + str(context.exception), + f"The provided identifier: {test_identifier} is duplicated" + ) + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.duplicate) + self.assertEqual( + issue.get('diagnostics'), + f"The provided identifier: {test_identifier} is duplicated" + ) + + def test_errors_server_error(self): + """Test correct operation of ServerError""" + test_response = "test_response" + test_message = "test_message" + + with self.assertRaises(errors.ServerError) as context: + raise errors.ServerError(test_response, test_message) + self.assert_response_message(context, test_response, test_message) + self.assertEqual(str(context.exception), f"{test_message}\n{test_response}") + outcome = context.exception.to_operation_outcome() + self.assert_operation_outcome(outcome) + issue = outcome.get('issue')[0] + self.assertEqual(issue.get('severity'), errors.Severity.error) + self.assertEqual(issue.get('code'), errors.Code.server_error) + self.assertEqual(issue.get('diagnostics'), f"{test_message}\n{test_response}") + + def test_errors_parameter_exception(self): + """Test correct operation of ParameterException""" + test_message = "test_message" + + with self.assertRaises(errors.ParameterException) as context: + raise errors.ParameterException(test_message) + self.assertEqual(context.exception.message, test_message) + self.assertEqual(str(context.exception), test_message) + + def test_errors_unauthorized_system_error(self): + """Test correct operation 
+    def test_errors_unauthorized_system_error(self):
+        """Test correct operation of UnauthorizedSystemError"""
+        test_message = "test_message"
+
+        with self.assertRaises(errors.UnauthorizedSystemError) as context:
+            raise errors.UnauthorizedSystemError(test_message)
+        self.assertEqual(context.exception.message, test_message)
+        self.assertEqual(str(context.exception), test_message)
+        outcome = context.exception.to_operation_outcome()
+        self.assert_operation_outcome(outcome)
+        issue = outcome.get('issue')[0]
+        self.assertEqual(issue.get('severity'), errors.Severity.error)
+        self.assertEqual(issue.get('code'), errors.Code.forbidden)
+        self.assertEqual(issue.get('diagnostics'), test_message)
+
+    def test_errors_unauthorized_system_error_no_message(self):
+        """Test correct operation of UnauthorizedSystemError with no message"""
+
+        with self.assertRaises(errors.UnauthorizedSystemError) as context:
+            raise errors.UnauthorizedSystemError()
+        self.assertEqual(context.exception.message, "Unauthorized system")
+        self.assertEqual(str(context.exception), "Unauthorized system")
+        outcome = context.exception.to_operation_outcome()
+        self.assert_operation_outcome(outcome)
+        issue = outcome.get('issue')[0]
+        self.assertEqual(issue.get('severity'), errors.Severity.error)
+        self.assertEqual(issue.get('code'), errors.Code.forbidden)
+        self.assertEqual(issue.get('diagnostics'), "Unauthorized system")
+
+    def test_errors_message_not_successful_error(self):
+        """Test correct operation of MessageNotSuccessfulError"""
+        test_message = "test_message"
+
+        with self.assertRaises(errors.MessageNotSuccessfulError) as context:
+            raise errors.MessageNotSuccessfulError(test_message)
+        self.assertEqual(context.exception.message, test_message)
+
+    def test_errors_message_not_successful_error_no_message(self):
+        """Test correct operation of MessageNotSuccessfulError with no message"""
+
+        with self.assertRaises(errors.MessageNotSuccessfulError) as context:
+            raise errors.MessageNotSuccessfulError()
+        self.assertIsNone(context.exception.message)
+
+    def test_errors_record_processor_error(self):
+        """Test correct operation of RecordProcessorError"""
+        test_diagnostics = {
+            "test_diagnostic": "test_value"
+        }
+
+        with self.assertRaises(errors.RecordProcessorError) as context:
+            raise errors.RecordProcessorError(test_diagnostics)
+        self.assertEqual(context.exception.diagnostics_dictionary, test_diagnostics)
diff --git a/lambdas/shared/tests/test_common/test_log_decorator.py b/lambdas/shared/tests/test_common/test_log_decorator.py
index 3c5a150d7..9ada72bb3 100644
--- a/lambdas/shared/tests/test_common/test_log_decorator.py
+++ b/lambdas/shared/tests/test_common/test_log_decorator.py
@@ -82,6 +82,30 @@ def test_generate_and_send_logs_success(self, mock_send_log, mock_time):
         self.mock_logger_error.assert_not_called()
         mock_send_log.assert_called_once_with(self.test_stream, expected_log_data)
 
+    @patch("time.time")
+    @patch("common.log_decorator.send_log_to_firehose")
+    def test_generate_and_send_logs_with_ms_precision(self, mock_send_log, mock_time):
+        """Test generate_and_send_logs using ms precision"""
+        # Arrange
+        mock_time.return_value = 1000.5
+        start_time = 1000.0
+        base_log_data = {"function_name": "test_func", "date_time": "2023-01-01"}
+        additional_log_data = {"statusCode": 200, "result": "success"}
+
+        # Act
+        generate_and_send_logs(self.test_stream, start_time, base_log_data, additional_log_data, use_ms_precision=True)
+
+        # Assert
+        expected_log_data = {
+            "function_name": "test_func",
+            "date_time": "2023-01-01",
+            "time_taken": "500.0ms",
+            "statusCode": 200,
+            "result": "success"
+        }
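+        # 0.5s elapsed (1000.5 - 1000.0) is expected to be formatted as "500.0ms" when use_ms_precision=True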
+        self.mock_logger_error.assert_not_called()
+        mock_send_log.assert_called_once_with(self.test_stream, expected_log_data)
+
     @patch("time.time")
     @patch("common.log_decorator.send_log_to_firehose")
     def test_generate_and_send_logs_error(self, mock_send_log, mock_time):
diff --git a/terraform/ack_lambda.tf b/terraform/ack_lambda.tf
index 4997b5ec5..a5d1d09c8 100644
--- a/terraform/ack_lambda.tf
+++ b/terraform/ack_lambda.tf
@@ -1,8 +1,11 @@
 # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments
 locals {
-  ack_lambda_dir = abspath("${path.root}/../ack_backend")
-  ack_lambda_files = fileset(local.ack_lambda_dir, "**")
+  ack_lambda_dir = abspath("${path.root}/../lambdas/ack_backend")
+
+  ack_lambda_files = fileset(local.ack_lambda_dir, "**")
+
   ack_lambda_dir_sha = sha1(join("", [for f in local.ack_lambda_files : filesha1("${local.ack_lambda_dir}/${f}")]))
+
   ack_lambda_name = "${local.short_prefix}-ack_lambda"
 }
 
@@ -18,7 +21,7 @@ resource "aws_ecr_repository" "ack_lambda_repository" {
 module "ack_processor_docker_image" {
   source = "terraform-aws-modules/lambda/aws//modules/docker-build"
   version = "8.0.1"
-
+  docker_file_path = "./ack_backend/Dockerfile"
   create_ecr_repo = false
   ecr_repo = aws_ecr_repository.ack_lambda_repository.name
   ecr_repo_lifecycle_policy = jsonencode({
@@ -40,9 +43,10 @@ module "ack_processor_docker_image" {
   platform = "linux/amd64"
   use_image_tag = false
 
-  source_path = local.ack_lambda_dir
+  source_path = abspath("${path.root}/../lambdas")
 
   triggers = {
-    dir_sha = local.ack_lambda_dir_sha
+    dir_sha        = local.ack_lambda_dir_sha
+    shared_dir_sha = local.shared_dir_sha
   }
 }