diff --git a/.github/workflows/base-lambdas-reusable-deploy-all.yml b/.github/workflows/base-lambdas-reusable-deploy-all.yml index 2d937cccc..de9e1079a 100644 --- a/.github/workflows/base-lambdas-reusable-deploy-all.yml +++ b/.github/workflows/base-lambdas-reusable-deploy-all.yml @@ -682,6 +682,19 @@ jobs: lambda_layer_names: "core_lambda_layer" secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + deploy_search_document_review_lambda: + name: Deploy Search Document Review + uses: ./.github/workflows/base-lambdas-reusable-deploy.yml + with: + environment: ${{ inputs.environment }} + python_version: ${{ inputs.python_version }} + build_branch: ${{ inputs.build_branch }} + sandbox: ${{ inputs.sandbox }} + lambda_handler_name: search_document_review_handler + lambda_aws_name: SearchDocumentReview + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} deploy_get_document_reference_by_id_lambda: name: Deploy get_document_reference_lambda diff --git a/lambdas/enums/document_review_accepted_querystring_parameters.py b/lambdas/enums/document_review_accepted_querystring_parameters.py new file mode 100644 index 000000000..77e72145b --- /dev/null +++ b/lambdas/enums/document_review_accepted_querystring_parameters.py @@ -0,0 +1,9 @@ +from enum import StrEnum + + +class DocumentReviewQuerystringParameters(StrEnum): + LIMIT = "limit" + NEXT_PAGE_TOKEN = "nextPageToken" + UPLOADER = "uploader" + NHS_NUMBER = "nhsNumber" + diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py index 57005de5e..9f3a21b50 100644 --- a/lambdas/enums/lambda_error.py +++ b/lambdas/enums/lambda_error.py @@ -60,7 +60,6 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: "fhir_coding": UKCoreSpineError.VALIDATION_ERROR, } - """ Errors for /DocumentReference """ @@ -68,22 +67,22 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: "err_code": "DR_4001", "message": "Missing event body", "fhir_coding": 
FhirIssueCoding.REQUIRED, - } + } DocRefPayload = { "err_code": "DR_4002", "message": "Invalid json in body", "fhir_coding": FhirIssueCoding.INVALID, - } + } DocRefProps = { "err_code": "DR_4003", "message": "Request body missing some properties", - "fhircoding": FhirIssueCoding.REQUIRED + "fhircoding": FhirIssueCoding.REQUIRED, } DocRefInvalidFiles = { "err_code": "DR_4004", "message": "Invalid files or id", - "fhir_coding": FhirIssueCoding.INVALID - } + "fhir_coding": FhirIssueCoding.INVALID, + } DocRefNoParse = { "err_code": "DR_4005", "message": "Failed to parse document upload request data", @@ -117,7 +116,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: DocRefUploadInternalError = { "err_code": "DR_5002", "message": "An error occurred when creating pre-signed url for document reference", - "fhir_coding": FhirIssueCoding.EXCEPTION + "fhir_coding": FhirIssueCoding.EXCEPTION, } DocRefPatientSearchInvalid = { "err_code": "DR_5003", @@ -136,12 +135,12 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: UpdateDocNHSNumberMismatch = { "err_code": "UDR_5005", "message": "NHS number did not match", - "fhir_coding": FhirIssueCoding.INVARIANT + "fhir_coding": FhirIssueCoding.INVARIANT, } UpdateDocNotLatestVersion = { "err_code": "UDR_5006", "message": "Document is not the latest version", - "fhir_coding": FhirIssueCoding.INVARIANT + "fhir_coding": FhirIssueCoding.INVARIANT, } """ @@ -630,3 +629,26 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: "message": "An internal server error occurred", "fhir_coding": FhirIssueCoding.EXCEPTION, } + + """ + Errors for SearchDocumentReviewReference exceptions + """ + DocumentReviewDB = { + "err_code": "SDR_5001", + "message": RETRIEVE_DOCUMENTS, + } + + DocumentReviewValidation = { + "err_code": "SDR_5002", + "message": "Review document model error", + } + + SearchDocumentReviewMissingODS = { + "err_code": "SDR_4001", + "message": "Missing 
ODS code in request context", + } + + SearchDocumentInvalidQuerystring = { + "err_code": "SDR_4002", + "message": "Invalid query string passed", + } diff --git a/lambdas/handlers/search_document_review_handler.py b/lambdas/handlers/search_document_review_handler.py new file mode 100644 index 000000000..6fc31ec33 --- /dev/null +++ b/lambdas/handlers/search_document_review_handler.py @@ -0,0 +1,118 @@ +import json + +from enums.document_review_accepted_querystring_parameters import ( + DocumentReviewQuerystringParameters, +) +from enums.feature_flags import FeatureFlags +from enums.lambda_error import LambdaError +from services.feature_flags_service import FeatureFlagService +from services.search_document_review_service import SearchDocumentReviewService +from utils.audit_logging_setup import LoggingService +from utils.decorators.ensure_env_var import ensure_environment_variables +from utils.decorators.handle_lambda_exceptions import handle_lambda_exceptions +from utils.decorators.override_error_check import override_error_check +from utils.decorators.set_audit_arg import set_request_context_for_logging +from utils.exceptions import OdsErrorException +from utils.lambda_exceptions import DocumentReviewException +from utils.lambda_response import ApiGatewayResponse +from utils.request_context import request_context + +logger = LoggingService(__name__) + + +@set_request_context_for_logging +@ensure_environment_variables(names=["DOCUMENT_REVIEW_DYNAMODB_NAME"]) +@override_error_check +@handle_lambda_exceptions +def lambda_handler(event, context): + """ + Lambda handler for searching documents pending review by custodian. + Triggered by GET request to /SearchDocumentReview endpoint. 
+ Args: + event: API Gateway event containing query optional query string parameters + QueryStringParameters: limit - Limit for DynamoDB query, defaulted to 50 if not provided, + nhsNumber - Patient NHS number, used for filtering DynamoDB results, + uploader - Author ODS code, used for filtering DynamoDB results, + nextPageToken - Encoded exclusive start key used to query DynamoDB for next page of results. + context: Lambda context + + Returns: + API Gateway response containing Document Review references, number of reference returned, and next page token if present. + 401 - No ODS code or auth token provided in request. + 500 - Document Review Error, Internal Server Error. + + """ + try: + feature_flag_service = FeatureFlagService() + upload_lambda_enabled_flag_object = ( + feature_flag_service.get_feature_flags_by_flag( + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_3_ENABLED + ) + ) + + if not upload_lambda_enabled_flag_object[ + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_3_ENABLED + ]: + logger.info("Feature flag not enabled, event will not be processed") + raise DocumentReviewException(403, LambdaError.FeatureFlagDisabled) + + ods_code = get_ods_code_from_request_context() + + params = parse_querystring_parameters(event) + + search_document_reference_service = SearchDocumentReviewService() + + references, next_page_token = search_document_reference_service.process_request( + params=params, ods_code=ods_code + ) + + response = {"documentReviewReferences": references, "count": len(references)} + + if next_page_token: + response["nextPageToken"] = next_page_token + + return ApiGatewayResponse( + status_code=200, + body=json.dumps(response), + methods="GET", + ).create_api_gateway_response() + + except OdsErrorException as e: + logger.error(e) + return ApiGatewayResponse( + status_code=401, + body=LambdaError.SearchDocumentReviewMissingODS.create_error_body(), + methods="GET", + ).create_api_gateway_response() + + +def get_ods_code_from_request_context(): + 
logger.info("Getting ODS code from request context") + try: + ods_code = request_context.authorization.get("selected_organisation", {}).get( + "org_ods_code" + ) + if not ods_code: + raise OdsErrorException() + + return ods_code + + except AttributeError as e: + logger.error(e) + raise DocumentReviewException(401, LambdaError.SearchDocumentReviewMissingODS) + + +def parse_querystring_parameters(event): + logger.info("Parsing query string parameters.") + params = event.get("queryStringParameters", {}) + + extracted_params = {} + + if not params: + return extracted_params + + for param in DocumentReviewQuerystringParameters: + if param in params: + extracted_params[param.value] = params.get(param) + + return extracted_params diff --git a/lambdas/services/authoriser_service.py b/lambdas/services/authoriser_service.py index 49f6aa376..43345c836 100644 --- a/lambdas/services/authoriser_service.py +++ b/lambdas/services/authoriser_service.py @@ -117,6 +117,9 @@ def deny_access_policy(self, path, user_role, nhs_number: str = None): case "/Feedback": deny_resource = False + case "/DocumentReview": + deny_resource = False + case "/DocumentStatus": deny_resource = ( not patient_access_is_allowed or is_user_gp_clinical or is_user_pcse diff --git a/lambdas/services/base/dynamo_service.py b/lambdas/services/base/dynamo_service.py index 3daffe33d..1e4c8807c 100644 --- a/lambdas/services/base/dynamo_service.py +++ b/lambdas/services/base/dynamo_service.py @@ -36,15 +36,33 @@ def get_table(self, table_name: str): logger.error(str(e), {"Result": "Unable to connect to DB"}) raise e - def query_table( + def query_table_single( self, - table_name, - search_key, + table_name: str, + search_key: str, search_condition: str, index_name: str | None = None, - requested_fields: list[str] = None, - query_filter: Attr | ConditionBase = None, - ) -> list[dict]: + requested_fields: list[str] | None = None, + query_filter: Attr | ConditionBase | None = None, + limit: int | None = None, + 
start_key: dict | None = None, + ) -> dict: + """ + Execute a single DynamoDB query and return the full response. + + Args: + table_name: Name of the DynamoDB table + search_key: The partition key name to search on + search_condition: The value to match for the search key + index_name: Optional GSI/LSI name + requested_fields: Optional list of fields to project + query_filter: Optional filter expression + limit: Optional limit on number of items to return + start_key: Optional exclusive start key for pagination + + Returns: + Full DynamoDB query response including Items, LastEvaluatedKey, etc. + """ try: table = self.get_table(table_name) @@ -54,30 +72,75 @@ def query_table( if index_name: query_params["IndexName"] = index_name + if requested_fields: projection_expression = ",".join(requested_fields) query_params["ProjectionExpression"] = projection_expression + if query_filter: query_params["FilterExpression"] = query_filter - items = [] - while True: - results = table.query(**query_params) - if results is None or "Items" not in results: - logger.error(f"Unusable results in DynamoDB: {results!r}") - raise DynamoServiceException("Unrecognised response from DynamoDB") + if start_key: + query_params["ExclusiveStartKey"] = start_key - items += results["Items"] + if limit: + query_params["Limit"] = limit - if "LastEvaluatedKey" in results: - query_params["ExclusiveStartKey"] = results["LastEvaluatedKey"] - else: - break - return items + return table.query(**query_params) except ClientError as e: logger.error(str(e), {"Result": f"Unable to query table: {table_name}"}) raise e + def query_table( + self, + table_name: str, + search_key: str, + search_condition: str, + index_name: str | None = None, + requested_fields: list[str] | None = None, + query_filter: Attr | ConditionBase | None = None, + ) -> list[dict]: + """ + Execute a DynamoDB query and automatically paginate through all results. 
+ + Args: + table_name: Name of the DynamoDB table + search_key: The partition key name to search on + search_condition: The value to match for the search key + index_name: Optional GSI/LSI name + requested_fields: Optional list of fields to project + query_filter: Optional filter expression + + Returns: + List of all items from paginated query results + """ + items = [] + start_key = None + + while True: + results = self.query_table_single( + table_name=table_name, + search_key=search_key, + search_condition=search_condition, + index_name=index_name, + requested_fields=requested_fields, + query_filter=query_filter, + start_key=start_key, + ) + + if results is None or "Items" not in results: + logger.error(f"Unusable results in DynamoDB: {results!r}") + raise DynamoServiceException("Unrecognised response from DynamoDB") + + items += results["Items"] + + if "LastEvaluatedKey" in results: + start_key = results["LastEvaluatedKey"] + else: + break + + return items + def create_item(self, table_name, item): try: table = self.get_table(table_name) diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index c4c4ee1d2..68375f67f 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -1,14 +1,23 @@ import os +from boto3.dynamodb.conditions import Attr, ConditionBase +from botocore.exceptions import ClientError +from enums.document_review_status import DocumentReviewStatus +from enums.dynamo_filter import AttributeOperator +from enums.lambda_error import LambdaError from models.document_review import DocumentUploadReviewReference +from pydantic import ValidationError from services.document_service import DocumentService from utils.audit_logging_setup import LoggingService +from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder +from utils.lambda_exceptions import DocumentReviewException logger = LoggingService(__name__) class 
DocumentUploadReviewService(DocumentService): """Service for handling DocumentUploadReviewReference operations.""" + DEFAULT_QUERY_LIMIT = 50 def __init__(self): super().__init__() self._table_name = os.environ.get("DOCUMENT_REVIEW_DYNAMODB_NAME") @@ -26,6 +35,54 @@ def model_class(self) -> type: def s3_bucket(self) -> str: return self._s3_bucket + def query_docs_pending_review_by_custodian_with_limit( + self, + ods_code: str, + limit: int = DEFAULT_QUERY_LIMIT, + start_key: dict | None = None, + nhs_number: str | None = None, + uploader: str | None = None, + ) -> tuple[list[DocumentUploadReviewReference], dict | None]: + logger.info(f"Getting review document references for custodian: {ods_code}") + + filter_expression = self.build_review_query_filter( + nhs_number=nhs_number, uploader=uploader + ) + + try: + response = self.dynamo_service.query_table_single( + table_name=self.table_name, + search_key="Custodian", + search_condition=ods_code, + index_name="CustodianIndex", + limit=limit, + start_key=start_key, + query_filter=filter_expression, + ) + + references = self._validate_review_references(response["Items"]) + + last_evaluated_key = response.get("LastEvaluatedKey", None) + + return references, last_evaluated_key + + except ClientError as e: + logger.error(e) + raise DocumentReviewException(500, LambdaError.DocumentReviewDB) + + def _validate_review_references( + self, items: list[dict] + ) -> list[DocumentUploadReviewReference]: + try: + logger.info("Validating document review search response") + review_references = [ + self.model_class.model_validate(item) for item in items + ] + return review_references + except ValidationError as e: + logger.error(e) + raise DocumentReviewException(500, LambdaError.DocumentReviewValidation) + def update_document_review_custodian( self, patient_documents: list[DocumentUploadReviewReference], @@ -45,3 +102,21 @@ def update_document_review_custodian( document=review, update_fields_name=review_update_field, ) + + def 
build_review_query_filter( + self, nhs_number: str | None = None, uploader: str | None = None + ) -> Attr | ConditionBase: + filter_builder = DynamoQueryFilterBuilder() + filter_builder.add_condition( + "ReviewStatus", AttributeOperator.EQUAL, DocumentReviewStatus.PENDING_REVIEW + ) + + if nhs_number: + filter_builder.add_condition( + "NhsNumber", AttributeOperator.EQUAL, nhs_number + ) + + if uploader: + filter_builder.add_condition("Author", AttributeOperator.EQUAL, uploader) + + return filter_builder.build() diff --git a/lambdas/services/search_document_review_service.py b/lambdas/services/search_document_review_service.py new file mode 100644 index 000000000..a0713c991 --- /dev/null +++ b/lambdas/services/search_document_review_service.py @@ -0,0 +1,98 @@ +import base64 +import decimal +import json + +from enums.lambda_error import LambdaError +from pydantic import ValidationError +from services.document_upload_review_service import DocumentUploadReviewService +from utils.audit_logging_setup import LoggingService +from utils.lambda_exceptions import DocumentReviewException + +logger = LoggingService(__name__) + + +class SearchDocumentReviewService: + + def __init__(self): + self.document_service = DocumentUploadReviewService() + + def process_request( + self, ods_code: str, params: dict + ) -> tuple[list[str], str | None]: + try: + + decoded_start_key = self.decode_start_key(params.get("nextPageToken", None)) + + str_limit = params.get("limit", self.document_service.DEFAULT_QUERY_LIMIT) + limit = int(str_limit) + + references, last_evaluated_key = self.get_review_document_references( + start_key=decoded_start_key, + ods_code=ods_code, + limit=limit, + nhs_number=params.get("nhsNumber", None), + uploader=params.get("uploader", None), + ) + output_refs = [ + reference.model_dump( + exclude_none=True, + include={ + "id", + "nhs_number", + "review_reason", + "document_snomed_code_type", + "author", + "upload_date", + }, + mode="json", + ) + for reference in 
references + ] + + encoded_exclusive_start_key = self.encode_start_key(last_evaluated_key) + + return output_refs, encoded_exclusive_start_key + + except ValidationError as e: + logger.error(e) + raise DocumentReviewException(500, LambdaError.DocumentReviewValidation) + except ValueError as e: + logger.error(e) + raise DocumentReviewException( + 400, LambdaError.SearchDocumentInvalidQuerystring + ) + + def get_review_document_references( + self, + ods_code: str, + limit: int | None = None, + start_key: dict | None = None, + nhs_number: str | None = None, + uploader: str | None = None, + ): + return self.document_service.query_docs_pending_review_by_custodian_with_limit( + ods_code=ods_code, + limit=limit, + start_key=start_key, + nhs_number=nhs_number, + uploader=uploader, + ) + + def decode_start_key(self, encoded_start_key: str | None) -> dict[str, str] | None: + return ( + json.loads( + base64.b64decode(encoded_start_key.encode("ascii")).decode("utf-8") + ) + if encoded_start_key + else None + ) + + def encode_start_key(self, start_key: dict) -> str | None: + if start_key: + for key, value in start_key.items(): + if isinstance(value, decimal.Decimal): + start_key[key] = int(value) + return ( + base64.b64encode(json.dumps(start_key).encode("ascii")).decode("utf-8") + ) + return None diff --git a/lambdas/tests/unit/handlers/test_search_document_review_handler.py b/lambdas/tests/unit/handlers/test_search_document_review_handler.py new file mode 100644 index 000000000..eebfae1f4 --- /dev/null +++ b/lambdas/tests/unit/handlers/test_search_document_review_handler.py @@ -0,0 +1,303 @@ +import base64 +import json + +import pytest +from enums.lambda_error import LambdaError +from handlers.search_document_review_handler import ( + get_ods_code_from_request_context, + lambda_handler, + parse_querystring_parameters, +) +from models.document_review import DocumentUploadReviewReference +from tests.unit.conftest import MOCK_INTERACTION_ID, TEST_CURRENT_GP_ODS, TEST_UUID +from 
tests.unit.helpers.data.search_document_review.dynamo_response import ( + MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE, +) +from utils.lambda_exceptions import DocumentReviewException +from utils.lambda_response import ApiGatewayResponse + +TEST_QUERY_LIMIT = "20" +TEST_LAST_EVALUATED_KEY = {"id": TEST_UUID} +TEST_ENCODED_START_KEY = base64.b64encode( + json.dumps(TEST_LAST_EVALUATED_KEY).encode("ascii") +).decode("utf-8") + +MOCK_EMPTY_QUERYSTRING_PARAMS = {} +MOCK_QUERYSTRING_PARAMS_LIMIT = {"limit": TEST_QUERY_LIMIT} +MOCK_QUERYSTRING_PARAMS_LIMIT_KEY = { + "limit": TEST_QUERY_LIMIT, + "nextPageToken": TEST_ENCODED_START_KEY, +} +MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOAD = { + "uploader": "Z67890", + **MOCK_QUERYSTRING_PARAMS_LIMIT_KEY, +} +MOCK_QUERYSTRING_PARAMS_UNWANTED_PARAM = {"unwanted": "this is not added"} +MOCK_QUERYSTRING_PARAMS_KEY = {"nextPageToken": TEST_ENCODED_START_KEY} + + +@pytest.fixture +def mock_service(mocker, mock_upload_document_iteration_3_enabled): + mocked_class = mocker.patch( + "handlers.search_document_review_handler.SearchDocumentReviewService" + ) + mocked_instance = mocked_class.return_value + yield mocked_instance + + +@pytest.fixture +def event_with_limit(): + return { + "httpMethod": "GET", + "queryStringParameters": MOCK_QUERYSTRING_PARAMS_LIMIT, + "headers": {"Authorization": "Bearer test_token"}, + } + + +@pytest.fixture +def event_with_limit_and_start_key(): + return { + "httpMethod": "GET", + "queryStringParameters": MOCK_QUERYSTRING_PARAMS_LIMIT_KEY, + "headers": {"Authorization": "Bearer test_token"}, + } + + +@pytest.fixture +def event_with_start_key_no_limit(): + return { + "httpMethod": "GET", + "queryStringParameters": MOCK_QUERYSTRING_PARAMS_KEY, + "headers": {"Authorization": "Bearer test_token"}, + } + + +@pytest.fixture +def event_with_unwanted_params(): + return { + "httpMethod": "GET", + "queryStringParameters": MOCK_QUERYSTRING_PARAMS_UNWANTED_PARAM, + "headers": {"Authorization": "Bearer test_token"}, + } + + 
+@pytest.fixture +def event_with_all_params(): + return { + "httpMethod": "GET", + "queryStringParameters": MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOAD, + "headers": {"Authorization": "Bearer test_token"}, + } + + +@pytest.fixture() +def mocked_request_context_with_ods(mocker): + mocked_context = mocker.MagicMock() + mocked_context.authorization = { + "selected_organisation": {"org_ods_code": TEST_CURRENT_GP_ODS}, + } + yield mocker.patch( + "handlers.search_document_review_handler.request_context", mocked_context + ) + + +@pytest.fixture() +def mocked_request_context_without_ods(mocker): + mocked_context = mocker.MagicMock() + mocked_context.authorization = { + "selected_organisation": {"org_ods_code": ""}, + } + yield mocker.patch( + "handlers.search_document_review_handler.request_context", mocked_context + ) + + +def test_get_ods_code_from_request(mocked_request_context_with_ods): + + assert get_ods_code_from_request_context() == TEST_CURRENT_GP_ODS + + +def test_get_ods_code_from_request_throws_exception_no_auth(mocker): + mocker.patch("handlers.search_document_review_handler.request_context", {}) + + with pytest.raises(DocumentReviewException) as e: + get_ods_code_from_request_context() + + assert e.value.status_code == 401 + + +def test_handler_returns_401_response_no_ods_code_in_request_context( + set_env, context, event, mock_service, mocked_request_context_without_ods +): + body = json.dumps( + { + "message": LambdaError.SearchDocumentReviewMissingODS.value["message"], + "err_code": LambdaError.SearchDocumentReviewMissingODS.value["err_code"], + "interaction_id": MOCK_INTERACTION_ID, + } + ) + + expected = ApiGatewayResponse( + status_code=401, body=body, methods="GET" + ).create_api_gateway_response() + + actual = lambda_handler(event, context) + + assert actual == expected + + +def test_parse_querystring_parameters( + event_with_limit_and_start_key, + event, + event_with_limit, + event_with_start_key_no_limit, + event_with_unwanted_params, + 
event_with_all_params, +): + assert ( + parse_querystring_parameters(event_with_limit_and_start_key) + == MOCK_QUERYSTRING_PARAMS_LIMIT_KEY + ) + assert parse_querystring_parameters(event) == {} + assert ( + parse_querystring_parameters(event_with_limit) == MOCK_QUERYSTRING_PARAMS_LIMIT + ) + assert ( + parse_querystring_parameters(event_with_start_key_no_limit) + == MOCK_QUERYSTRING_PARAMS_KEY + ) + assert parse_querystring_parameters(event_with_unwanted_params) == {} + assert ( + parse_querystring_parameters(event_with_all_params) + == MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOAD + ) + + +def test_process_request_called_with_correct_arguments( + mock_service, + context, + set_env, + event_with_all_params, + mocked_request_context_with_ods, +): + + lambda_handler(event_with_all_params, context) + + params = MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOAD + + mock_service.process_request.assert_called_with( + params=params, ods_code=TEST_CURRENT_GP_ODS + ) + + +def test_handler_returns_empty_list_of_references_no_dynamo_results_no_limit_in_query_params( + mock_service, context, set_env, mocked_request_context_with_ods, event +): + + mock_service.process_request.return_value = ([], None) + + expected = ApiGatewayResponse( + status_code=200, + body=json.dumps( + { + "documentReviewReferences": [], + "count": 0, + } + ), + methods="GET", + ).create_api_gateway_response() + + actual = lambda_handler(event, context) + + assert actual == expected + + +def test_handler_returns_list_of_references_last_evaluated_key_more_results_available( + mock_service, context, set_env, mocked_request_context_with_ods, event_with_limit +): + + references = [ + DocumentUploadReviewReference.model_validate(item).model_dump( + exclude_none=True, + include={"id", "nhs_number", "review_reason", "document_snomed_code_type"}, + mode="json", + ) + for item in MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"] + ] + + mock_service.process_request.return_value = ( + references, + TEST_ENCODED_START_KEY, + ) + + 
expected = ApiGatewayResponse( + status_code=200, + body=json.dumps( + { + "documentReviewReferences": references, + "count": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Count"], + "nextPageToken": TEST_ENCODED_START_KEY, + } + ), + methods="GET", + ).create_api_gateway_response() + + actual = lambda_handler(event_with_limit, context) + + assert actual == expected + + +def test_handler_returns_list_of_references_no_limit_passed( + mock_service, context, set_env, mocked_request_context_with_ods, event +): + references = [ + DocumentUploadReviewReference.model_validate(item).model_dump( + exclude_none=True, + include={"id", "nhs_number", "review_reason", "document_snomed_code_type"}, + mode="json", + ) + for item in MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"] + ] + + mock_service.process_request.return_value = (references, None) + + expected = ApiGatewayResponse( + status_code=200, + body=json.dumps( + { + "documentReviewReferences": references, + "count": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Count"], + } + ), + methods="GET", + ).create_api_gateway_response() + + actual = lambda_handler(event, context) + + assert actual == expected + + +def test_handler_returns_500_response_error_raised( + mock_service, context, set_env, mocked_request_context_with_ods, event_with_limit +): + + mock_service.process_request.side_effect = DocumentReviewException( + 500, LambdaError.DocumentReviewValidation + ) + + body = json.dumps( + { + "message": LambdaError.DocumentReviewValidation.value["message"], + "err_code": LambdaError.DocumentReviewValidation.value["err_code"], + "interaction_id": MOCK_INTERACTION_ID, + } + ) + + expected = ApiGatewayResponse( + status_code=500, + body=body, + methods="GET", + ).create_api_gateway_response() + actual = lambda_handler(event_with_limit, context) + + assert actual == expected diff --git a/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py b/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py new file 
mode 100644 index 000000000..43080e823 --- /dev/null +++ b/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py @@ -0,0 +1,97 @@ +from enums.document_review_status import DocumentReviewStatus +from enums.snomed_codes import SnomedCodes +from tests.unit.conftest import ( + MOCK_STAGING_STORE_BUCKET, + TEST_CURRENT_GP_ODS, + TEST_NHS_NUMBER, + TEST_UUID, +) + +MOCK_PREVIOUS_ODS_CODE = "Z67890" + +MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE = { + "Items": [ + { + "ID": "3d8683b9-1665-40d2-8499-6e8302d507ff", + "Files": [ + { + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-123", + "FileName": "document.csv", + }, + { + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-223", + "FileName": "results.pdf", + }, + ], + "Author": MOCK_PREVIOUS_ODS_CODE, + "Custodian": TEST_CURRENT_GP_ODS, + "UploadDate": 1704110400, + "NhsNumber": TEST_NHS_NUMBER, + "ReviewReason": "Failure", + "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW, + "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + }, + { + "ID": "4d8683b9-1665-40d2-8499-6e8302d507ff", + "Files": [ + { + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-123", + "FileName": "document.csv", + }, + { + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-223", + "FileName": "results.pdf", + }, + ], + "Author": MOCK_PREVIOUS_ODS_CODE, + "Custodian": TEST_CURRENT_GP_ODS, + "UploadDate": 1704110400, + "NhsNumber": TEST_NHS_NUMBER, + "ReviewReason": "Failure", + "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW, + "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + }, + { + "ID": "5d8683b9-1665-40d2-8499-6e8302d507ff", + "Files": [ + { + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-123", + "FileName": "document.csv", + }, + 
{ + "FileLocation": f"s3://{MOCK_STAGING_STORE_BUCKET}/{TEST_NHS_NUMBER}/test-key-223", + "FileName": "results.pdf", + }, + ], + "Author": MOCK_PREVIOUS_ODS_CODE, + "Custodian": TEST_CURRENT_GP_ODS, + "UploadDate": 1704110400, + "Reviewer": None, + "NhsNumber": TEST_NHS_NUMBER, + "ReviewDate": None, + "ReviewReason": "Failure", + "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW, + "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + }, + ], + "Count": 3, + "ScannedCount": 3, + "ResponseMetadata": { + "RequestId": "JHJBP4GU007VMB2V8C9NEKUL8VVV4KQNSO5AEMVJF66Q9ASUAAJG", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "server": "Server", + "date": "Tue, 29 Aug 2023 11:08:21 GMT", + "content-type": "application/x-amz-json-1.0", + "content-length": "510", + "connection": "keep-alive", + "x-amzn-requestid": "JHJBP4GU007VMB2V8C9NEKUL8VVV4KQNSO5AEMVJF66Q9ASUAAJG", + "x-amz-crc32": "820258331", + }, + "RetryAttempts": 0, + }, + "LastEvaluatedKey": TEST_UUID, +} diff --git a/lambdas/tests/unit/services/base/test_dynamo_service.py b/lambdas/tests/unit/services/base/test_dynamo_service.py index ce9594ad6..80ef7dc60 100755 --- a/lambdas/tests/unit/services/base/test_dynamo_service.py +++ b/lambdas/tests/unit/services/base/test_dynamo_service.py @@ -15,6 +15,7 @@ MOCK_PAGINATED_RESPONSE_2, MOCK_PAGINATED_RESPONSE_3, MOCK_RESPONSE, + MOCK_RESPONSE_WITH_LAST_KEY, ) from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder from utils.exceptions import DynamoServiceException @@ -46,6 +47,13 @@ def mock_scan_method(mock_table): yield scan_method +@pytest.fixture +def mock_query_method(mock_table): + table_instance = mock_table.return_value + query_method = table_instance.query + yield query_method + + @pytest.fixture def mock_filter_expression(): filter_builder = DynamoQueryFilterBuilder() @@ -166,6 +174,214 @@ def test_query_by_index_handles_pagination( 
mock_table.return_value.query.assert_has_calls(expected_calls) +def test_query_by_index_handles_limits( + mock_service, mock_filter_expression, mock_query_method +): + + mock_query_method.side_effect = [MOCK_PAGINATED_RESPONSE_1] + + expected_result = MOCK_PAGINATED_RESPONSE_1 + + actual = mock_service.query_table_single( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + index_name="NhsNumberIndex", + search_condition=TEST_NHS_NUMBER, + limit=4, + ) + assert expected_result == actual + + +def test_query_table_single_returns_full_response(mock_service, mock_query_method): + mock_query_method.return_value = MOCK_RESPONSE_WITH_LAST_KEY + search_key_obj = Key("NhsNumber").eq(TEST_NHS_NUMBER) + + actual = mock_service.query_table_single( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert actual == MOCK_RESPONSE_WITH_LAST_KEY + assert "Items" in actual + assert "LastEvaluatedKey" in actual + mock_query_method.assert_called_once_with( + KeyConditionExpression=search_key_obj, + ) + + +def test_query_table_single_with_all_parameters( + mock_service, mock_query_method, mock_filter_expression +): + mock_query_method.return_value = MOCK_RESPONSE + search_key_obj = Key("NhsNumber").eq(TEST_NHS_NUMBER) + start_key = {"ID": "test_start_key"} + requested_fields = ["FileName", "Created"] + + actual = mock_service.query_table_single( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + index_name="NhsNumberIndex", + requested_fields=requested_fields, + query_filter=mock_filter_expression, + limit=10, + start_key=start_key, + ) + + assert actual == MOCK_RESPONSE + mock_query_method.assert_called_once_with( + KeyConditionExpression=search_key_obj, + IndexName="NhsNumberIndex", + ProjectionExpression="FileName,Created", + FilterExpression=mock_filter_expression, + Limit=10, + ExclusiveStartKey=start_key, + ) + + +def test_query_table_single_with_start_key_for_pagination( + 
mock_service, mock_query_method +): + mock_query_method.return_value = MOCK_PAGINATED_RESPONSE_2 + search_key_obj = Key("NhsNumber").eq(TEST_NHS_NUMBER) + start_key = {"ID": "id_token_for_page_2"} + + actual = mock_service.query_table_single( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + start_key=start_key, + ) + + assert actual == MOCK_PAGINATED_RESPONSE_2 + mock_query_method.assert_called_once_with( + KeyConditionExpression=search_key_obj, + ExclusiveStartKey=start_key, + ) + + +def test_query_table_single_client_error_raises_exception( + mock_service, mock_query_method +): + mock_query_method.side_effect = MOCK_CLIENT_ERROR + + with pytest.raises(ClientError) as actual_response: + mock_service.query_table_single( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert actual_response.value == MOCK_CLIENT_ERROR + + +def test_query_table_calls_query_table_single_and_returns_items_list( + mock_service, mocker +): + mock_query_table_single = mocker.patch.object( + mock_service, "query_table_single", return_value=MOCK_RESPONSE + ) + + actual = mock_service.query_table( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert actual == MOCK_RESPONSE["Items"] + mock_query_table_single.assert_called_once_with( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + index_name=None, + requested_fields=None, + query_filter=None, + start_key=None, + ) + + +def test_query_table_handles_pagination_using_query_table_single(mock_service, mocker): + mock_query_table_single = mocker.patch.object( + mock_service, + "query_table_single", + side_effect=[ + MOCK_PAGINATED_RESPONSE_1, + MOCK_PAGINATED_RESPONSE_2, + MOCK_PAGINATED_RESPONSE_3, + ], + ) + + actual = mock_service.query_table( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert 
actual == EXPECTED_ITEMS_FOR_PAGINATED_RESULTS + assert mock_query_table_single.call_count == 3 + + # Verify pagination keys are passed correctly + calls = mock_query_table_single.call_args_list + assert calls[0][1]["start_key"] is None + assert calls[1][1]["start_key"] == {"ID": "id_token_for_page_2"} + assert calls[2][1]["start_key"] == {"ID": "id_token_for_page_3"} + + +def test_query_table_with_all_optional_parameters(mock_service, mocker): + mock_query_table_single = mocker.patch.object( + mock_service, "query_table_single", return_value=MOCK_RESPONSE + ) + filter_expression = Attr("Deleted").eq("") + requested_fields = ["FileName", "Created"] + + actual = mock_service.query_table( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + index_name="NhsNumberIndex", + requested_fields=requested_fields, + query_filter=filter_expression, + ) + + assert actual == MOCK_RESPONSE["Items"] + mock_query_table_single.assert_called_once_with( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + index_name="NhsNumberIndex", + requested_fields=requested_fields, + query_filter=filter_expression, + start_key=None, + ) + + +def test_query_table_raises_exception_when_response_has_no_items(mock_service, mocker): + mocker.patch.object(mock_service, "query_table_single", return_value={"Count": 0}) + + with pytest.raises(DynamoServiceException) as exc_info: + mock_service.query_table( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert "Unrecognised response from DynamoDB" in str(exc_info.value) + + +def test_query_table_raises_exception_when_response_is_none(mock_service, mocker): + mocker.patch.object(mock_service, "query_table_single", return_value=None) + + with pytest.raises(DynamoServiceException) as exc_info: + mock_service.query_table( + table_name=MOCK_TABLE_NAME, + search_key="NhsNumber", + search_condition=TEST_NHS_NUMBER, + ) + + assert 
"Unrecognised response from DynamoDB" in str(exc_info.value) + + def test_query_with_requested_fields_raises_exception_when_results_are_empty( mock_service, mock_table ): @@ -648,7 +864,7 @@ def test_update_item_with_condition_expression(mock_service, mock_table): update_key = {"ID": "9000000009"} condition_expression = "attribute_exists(FileName)" expression_attribute_values = {":expected_val": "expected_value"} - + expected_update_expression = "SET #FileName_attr = :FileName_val" expected_expr_attr_names = {"#FileName_attr": "FileName"} expected_expr_attr_values = { @@ -659,7 +875,9 @@ def test_update_item_with_condition_expression(mock_service, mock_table): mock_service.update_item( table_name=MOCK_TABLE_NAME, key_pair={"ID": TEST_NHS_NUMBER}, - updated_fields={DocumentReferenceMetadataFields.FILE_NAME.value: "test-filename"}, + updated_fields={ + DocumentReferenceMetadataFields.FILE_NAME.value: "test-filename" + }, condition_expression=condition_expression, expression_attribute_values=expression_attribute_values, ) @@ -681,14 +899,16 @@ def test_batch_writing_is_called_with_correct_parameters(mock_service, mock_tabl {"ID": "id2", "Name": "Item 2"}, {"ID": "id3", "Name": "Item 3"}, ] - - mock_batch_writer = mock_table.return_value.batch_writer.return_value.__enter__.return_value - + + mock_batch_writer = ( + mock_table.return_value.batch_writer.return_value.__enter__.return_value + ) + mock_service.batch_writing(MOCK_TABLE_NAME, items_to_write) mock_table.assert_called_with(MOCK_TABLE_NAME) mock_table.return_value.batch_writer.assert_called_once() - + assert mock_batch_writer.put_item.call_count == 3 for item in items_to_write: mock_batch_writer.put_item.assert_any_call(Item=item) @@ -697,8 +917,10 @@ def test_batch_writing_is_called_with_correct_parameters(mock_service, mock_tabl def test_batch_writing_client_error_raises_exception(mock_service, mock_table): items_to_write = [{"ID": "id1", "Name": "Item 1"}] expected_response = MOCK_CLIENT_ERROR - - 
mock_table.return_value.batch_writer.return_value.__enter__.side_effect = MOCK_CLIENT_ERROR + + mock_table.return_value.batch_writer.return_value.__enter__.side_effect = ( + MOCK_CLIENT_ERROR + ) with pytest.raises(ClientError) as actual_response: mock_service.batch_writing(MOCK_TABLE_NAME, items_to_write) @@ -708,9 +930,11 @@ def test_batch_writing_client_error_raises_exception(mock_service, mock_table): def test_batch_writing_with_empty_list(mock_service, mock_table): items_to_write = [] - - mock_batch_writer = mock_table.return_value.batch_writer.return_value.__enter__.return_value - + + mock_batch_writer = ( + mock_table.return_value.batch_writer.return_value.__enter__.return_value + ) + mock_service.batch_writing(MOCK_TABLE_NAME, items_to_write) mock_table.assert_called_with(MOCK_TABLE_NAME) @@ -736,7 +960,7 @@ def test_transact_write_items_success(mock_service, mock_dynamo_service): } }, ] - + mock_response = {"ResponseMetadata": {"HTTPStatusCode": 200}} mock_dynamo_service.meta.client.transact_write_items.return_value = mock_response @@ -757,9 +981,12 @@ def test_transact_write_items_transaction_cancelled(mock_service, mock_dynamo_se } } ] - + error_response = { - "Error": {"Code": "TransactionCanceledException", "Message": "Transaction cancelled"}, + "Error": { + "Code": "TransactionCanceledException", + "Message": "Transaction cancelled", + }, "CancellationReasons": [{"Code": "ConditionalCheckFailed"}], } mock_dynamo_service.meta.client.transact_write_items.side_effect = ClientError( @@ -781,7 +1008,7 @@ def test_transact_write_items_generic_client_error(mock_service, mock_dynamo_ser } } ] - + mock_dynamo_service.meta.client.transact_write_items.side_effect = MOCK_CLIENT_ERROR with pytest.raises(ClientError) as exc_info: @@ -802,19 +1029,29 @@ def test_build_update_transaction_item_single_condition(mock_service): assert "Update" in result update_item = result["Update"] - + assert update_item["TableName"] == table_name assert update_item["Key"] == 
document_key - assert "SET #FileName_attr = :FileName_val, #Deleted_attr = :Deleted_val" == update_item["UpdateExpression"] - assert update_item["ConditionExpression"] == "#DocStatus_attr = :DocStatus_condition_val" - + assert ( + "SET #FileName_attr = :FileName_val, #Deleted_attr = :Deleted_val" + == update_item["UpdateExpression"] + ) + assert ( + update_item["ConditionExpression"] + == "#DocStatus_attr = :DocStatus_condition_val" + ) + assert update_item["ExpressionAttributeNames"]["#FileName_attr"] == "FileName" assert update_item["ExpressionAttributeNames"]["#Deleted_attr"] == "Deleted" assert update_item["ExpressionAttributeNames"]["#DocStatus_attr"] == "DocStatus" - - assert update_item["ExpressionAttributeValues"][":FileName_val"] == "new_filename.pdf" + + assert ( + update_item["ExpressionAttributeValues"][":FileName_val"] == "new_filename.pdf" + ) assert update_item["ExpressionAttributeValues"][":Deleted_val"] == "" - assert update_item["ExpressionAttributeValues"][":DocStatus_condition_val"] == "final" + assert ( + update_item["ExpressionAttributeValues"][":DocStatus_condition_val"] == "final" + ) def test_build_update_transaction_item_multiple_conditions(mock_service): @@ -829,26 +1066,28 @@ def test_build_update_transaction_item_multiple_conditions(mock_service): assert "Update" in result update_item = result["Update"] - + assert update_item["TableName"] == table_name assert update_item["Key"] == document_key - + # Check that all conditions are present (order might vary) condition_expr = update_item["ConditionExpression"] assert "#DocStatus_attr = :DocStatus_condition_val" in condition_expr assert "#Version_attr = :Version_condition_val" in condition_expr assert "#Uploaded_attr = :Uploaded_condition_val" in condition_expr assert condition_expr.count(" AND ") == 2 - + # Check all attribute names are present assert update_item["ExpressionAttributeNames"]["#FileName_attr"] == "FileName" assert update_item["ExpressionAttributeNames"]["#DocStatus_attr"] == 
"DocStatus" assert update_item["ExpressionAttributeNames"]["#Version_attr"] == "Version" assert update_item["ExpressionAttributeNames"]["#Uploaded_attr"] == "Uploaded" - + # Check all attribute values are present assert update_item["ExpressionAttributeValues"][":FileName_val"] == "updated.pdf" - assert update_item["ExpressionAttributeValues"][":DocStatus_condition_val"] == "final" + assert ( + update_item["ExpressionAttributeValues"][":DocStatus_condition_val"] == "final" + ) assert update_item["ExpressionAttributeValues"][":Version_condition_val"] == 1 assert update_item["ExpressionAttributeValues"][":Uploaded_condition_val"] is True @@ -865,8 +1104,8 @@ def test_build_update_transaction_item_empty_condition_fields(mock_service): assert "Update" in result update_item = result["Update"] - + # With empty condition_fields, condition expression should be empty string assert update_item["ConditionExpression"] == "" assert update_item["TableName"] == table_name - assert update_item["Key"] == document_key \ No newline at end of file + assert update_item["Key"] == document_key diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index c4a0b53ce..96b1cc1f2 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -1,6 +1,7 @@ from unittest.mock import MagicMock import pytest +from boto3.dynamodb.conditions import Attr from models.document_review import DocumentUploadReviewReference from services.document_upload_review_service import DocumentUploadReviewService from tests.unit.conftest import ( @@ -11,6 +12,7 @@ TEST_ODS_CODE = "Y12345" NEW_ODS_CODE = "Z98765" +TEST_QUERY_LIMIT = 50 @pytest.fixture @@ -161,3 +163,110 @@ def test_update_document_review_custodian_single_document(mock_service, mocker): mock_update_document.assert_called_once_with( document=single_review, 
update_fields_name={"custodian"} ) + + +def test_build_review_query_filter_creates_filter_from_nhs_number(mock_service): + expected = Attr("ReviewStatus").eq("PENDING_REVIEW") & Attr("NhsNumber").eq( + TEST_NHS_NUMBER + ) + actual = mock_service.build_review_query_filter(nhs_number=TEST_NHS_NUMBER) + + assert actual == expected + + +def test_build_review_query_filter_creates_filter_from_uploader_ods_code(mock_service): + expected = Attr("ReviewStatus").eq("PENDING_REVIEW") & Attr("Author").eq( + TEST_ODS_CODE + ) + actual = mock_service.build_review_query_filter(uploader=TEST_ODS_CODE) + + assert actual == expected + + +def test_build_filter_handles_both_nhs_number_and_uploader(mock_service): + expected = ( + Attr("ReviewStatus").eq("PENDING_REVIEW") + & Attr("NhsNumber").eq(TEST_NHS_NUMBER) + & Attr("Author").eq(TEST_ODS_CODE) + ) + actual = mock_service.build_review_query_filter( + nhs_number=TEST_NHS_NUMBER, uploader=TEST_ODS_CODE + ) + + assert actual == expected + + +def test_query_review_documents_queries_dynamodb_with_filter_expression_nhs_number_passed( + mock_service, mocker +): + mock_nhs_number_filter_builder = mocker.patch.object( + mock_service, "build_review_query_filter" + ) + mock_nhs_number_filter_builder.return_value = Attr("NhsNumber").eq(TEST_NHS_NUMBER) + + mock_service.query_docs_pending_review_by_custodian_with_limit( + ods_code=TEST_ODS_CODE, nhs_number=TEST_NHS_NUMBER + ) + + mock_nhs_number_filter_builder.assert_called_with( + nhs_number=TEST_NHS_NUMBER, uploader=None + ) + mock_service.dynamo_service.query_table_single.assert_called_with( + table_name=MOCK_DOCUMENT_REVIEW_TABLE, + search_key="Custodian", + search_condition=TEST_ODS_CODE, + index_name="CustodianIndex", + limit=TEST_QUERY_LIMIT, + start_key=None, + query_filter=mock_nhs_number_filter_builder.return_value, + ) + + +def test_query_review_documents_queries_dynamodb_with_filter_expression_uploader_passed( + mock_service, mocker +): + mock_uploader_filter_builder = 
mocker.patch.object( + mock_service, "build_review_query_filter" + ) + mock_uploader_filter_builder.return_value = Attr("Author").eq(NEW_ODS_CODE) + mock_service.query_docs_pending_review_by_custodian_with_limit( + ods_code=TEST_ODS_CODE, uploader=NEW_ODS_CODE + ) + mock_uploader_filter_builder.assert_called_with( + nhs_number=None, uploader=NEW_ODS_CODE + ) + mock_service.dynamo_service.query_table_single.assert_called_with( + table_name=MOCK_DOCUMENT_REVIEW_TABLE, + search_key="Custodian", + search_condition=TEST_ODS_CODE, + index_name="CustodianIndex", + limit=TEST_QUERY_LIMIT, + start_key=None, + query_filter=mock_uploader_filter_builder.return_value, + ) + + +def test_query_review_documents_by_custodian_handles_filtering_by_nhs_number_and_uploader( + mock_service, mocker +): + mock_uploader_filter_builder = mocker.patch.object( + mock_service, "build_review_query_filter" + ) + mock_uploader_filter_builder.return_value = Attr("Author").eq(NEW_ODS_CODE) & Attr( + "NhsNumber" + ).eq(TEST_NHS_NUMBER) + mock_service.query_docs_pending_review_by_custodian_with_limit( + ods_code=TEST_ODS_CODE, uploader=NEW_ODS_CODE, nhs_number=TEST_NHS_NUMBER + ) + mock_uploader_filter_builder.assert_called_with( + nhs_number=TEST_NHS_NUMBER, uploader=NEW_ODS_CODE + ) + mock_service.dynamo_service.query_table_single.assert_called_with( + table_name=MOCK_DOCUMENT_REVIEW_TABLE, + search_key="Custodian", + search_condition=TEST_ODS_CODE, + index_name="CustodianIndex", + limit=TEST_QUERY_LIMIT, + start_key=None, + query_filter=mock_uploader_filter_builder.return_value, + ) diff --git a/lambdas/tests/unit/services/test_search_document_review_service.py b/lambdas/tests/unit/services/test_search_document_review_service.py new file mode 100644 index 000000000..f2f4ee396 --- /dev/null +++ b/lambdas/tests/unit/services/test_search_document_review_service.py @@ -0,0 +1,272 @@ +import base64 +import json + +import pytest +from enums.lambda_error import LambdaError +from models.document_review 
import DocumentUploadReviewReference +from pydantic import ValidationError +from services.search_document_review_service import SearchDocumentReviewService +from tests.unit.conftest import TEST_CURRENT_GP_ODS, TEST_UUID +from tests.unit.helpers.data.search_document_review.dynamo_response import ( + MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE, +) +from utils.lambda_exceptions import DocumentReviewException + +TEST_QUERY_LIMIT = "20" +TEST_LAST_EVALUATED_KEY = {"id": TEST_UUID, "UploadDate": 0} +# Encode the last-evaluated key the same way clients receive it in nextPageToken +TEST_ENCODED_START_KEY = base64.b64encode( + json.dumps(TEST_LAST_EVALUATED_KEY).encode("ascii") +).decode("utf-8") + +MOCK_QUERYSTRING_PARAMS_LIMIT_KEY = { + "limit": TEST_QUERY_LIMIT, + "nextPageToken": TEST_ENCODED_START_KEY, +} +MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOADER = { + "uploader": "Z67890", + **MOCK_QUERYSTRING_PARAMS_LIMIT_KEY, +} + +MOCK_QUERYSTRING_PARAMS_INVALID_LIMIT = { + "limit": "not an integer", +} + + +@pytest.fixture +def search_document_review_service(set_env, mocker): + service = SearchDocumentReviewService() + mocker.patch.object(service, "document_service") + yield service + + +def test_process_request_happy_path(search_document_review_service, mocker): + mocker.patch.object( + search_document_review_service, "decode_start_key" + ).return_value = TEST_LAST_EVALUATED_KEY + + expected_refs = [ + DocumentUploadReviewReference.model_validate(item) + for item in MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"] + ] + mocker.patch.object( + search_document_review_service, "get_review_document_references" + ).return_value = ( + expected_refs, + TEST_LAST_EVALUATED_KEY, + ) + + expected = ( + [ + { + "id": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][0]["ID"], + "review_reason": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][0][ + "ReviewReason" + ], + "nhs_number": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][0][ + "NhsNumber" + ], + "document_snomed_code_type": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE[ + "Items" + 
][0]["DocumentSnomedCodeType"], + "author": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][0]["Author"], + "upload_date": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][0][ + "UploadDate" + ], + }, + { + "id": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][1]["ID"], + "review_reason": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][1][ + "ReviewReason" + ], + "nhs_number": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][1][ + "NhsNumber" + ], + "document_snomed_code_type": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE[ + "Items" + ][1]["DocumentSnomedCodeType"], + "author": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][1]["Author"], + "upload_date": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][1][ + "UploadDate" + ], + }, + { + "id": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][2]["ID"], + "review_reason": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][2][ + "ReviewReason" + ], + "nhs_number": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][2][ + "NhsNumber" + ], + "document_snomed_code_type": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE[ + "Items" + ][2]["DocumentSnomedCodeType"], + "author": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][2]["Author"], + "upload_date": MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"][2][ + "UploadDate" + ], + }, + ], + TEST_ENCODED_START_KEY, + ) + + actual = search_document_review_service.process_request( + params=MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOADER, + ods_code=TEST_CURRENT_GP_ODS, + ) + + search_document_review_service.decode_start_key.assert_called_with( + TEST_ENCODED_START_KEY + ) + search_document_review_service.get_review_document_references.assert_called_with( + ods_code=TEST_CURRENT_GP_ODS, + start_key=TEST_LAST_EVALUATED_KEY, + limit=int(TEST_QUERY_LIMIT), + uploader="Z67890", + nhs_number=None, + ) + + assert actual == expected + + +def test_process_request_handles_invalid_limit_querystring( + search_document_review_service, mocker +): + mocker.patch.object( + search_document_review_service, "decode_start_key" + ).return_value = 
TEST_LAST_EVALUATED_KEY + + with pytest.raises(DocumentReviewException) as e: + search_document_review_service.process_request( + params=MOCK_QUERYSTRING_PARAMS_INVALID_LIMIT, ods_code=TEST_CURRENT_GP_ODS + ) + assert e.value.status_code == 400 + assert e.value.err_code == "SDR_4002" + assert e.value.message == "Invalid query string passed" + + +def test_process_request_handles_validation_error( + search_document_review_service, mocker +): + mocker.patch.object( + search_document_review_service, "decode_start_key" + ).return_value = TEST_LAST_EVALUATED_KEY + + mocker.patch.object( + search_document_review_service, "get_review_document_references" + ).side_effect = ValidationError.from_exception_data("", [{"type": "missing", "loc": ("limit",), "input": None}]) + + with pytest.raises(DocumentReviewException) as e: + search_document_review_service.process_request( + ods_code=TEST_CURRENT_GP_ODS, + params=MOCK_QUERYSTRING_PARAMS_LIMIT_KEY_UPLOADER, + ) + assert e.value.status_code == 500 + assert e.value.err_code == "SDR_5002" + assert e.value.message == "Review document model error" + + +def test_service_queries_document_review_table_with_correct_args( + search_document_review_service, +): + search_document_review_service.get_review_document_references( + TEST_CURRENT_GP_ODS, int(TEST_QUERY_LIMIT), TEST_LAST_EVALUATED_KEY, None, None + ) + + search_document_review_service.document_service.query_docs_pending_review_by_custodian_with_limit.assert_called_with( + ods_code=TEST_CURRENT_GP_ODS, + limit=int(TEST_QUERY_LIMIT), + start_key=TEST_LAST_EVALUATED_KEY, + nhs_number=None, + uploader=None, + ) + + +def test_get_review_document_references_returns_document_references( + search_document_review_service, +): + expected_references = [ + DocumentUploadReviewReference.model_validate(item) + for item in MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"] + ] + search_document_review_service.document_service.query_docs_pending_review_by_custodian_with_limit.return_value = ( + expected_references, + TEST_LAST_EVALUATED_KEY, + ) + + actual = 
search_document_review_service.get_review_document_references( + TEST_CURRENT_GP_ODS, int(TEST_QUERY_LIMIT) + ) + + expected = ( + expected_references, + TEST_LAST_EVALUATED_KEY, + ) + + assert actual == expected + + +def test_get_review_document_references_handles_empty_result( + search_document_review_service, +): + search_document_review_service.document_service.query_docs_pending_review_by_custodian_with_limit.return_value = ( + [], + None, + ) + + actual = search_document_review_service.get_review_document_references( + TEST_CURRENT_GP_ODS, int(TEST_QUERY_LIMIT) + ) + + assert actual == ([], None) + + +def test_get_review_document_references_handles_no_limit_passed( + search_document_review_service, +): + expected_references = [ + DocumentUploadReviewReference.model_validate(item) + for item in MOCK_DOCUMENT_REVIEW_SEARCH_RESPONSE["Items"] + ] + search_document_review_service.document_service.query_docs_pending_review_by_custodian_with_limit.return_value = ( + expected_references, + None, + ) + + expected = ( + expected_references, + None, + ) + + actual = search_document_review_service.get_review_document_references( + TEST_CURRENT_GP_ODS + ) + + assert actual == expected + + +def test_get_review_document_references_throws_exception_client_error( + search_document_review_service, +): + ( + search_document_review_service.document_service.query_docs_pending_review_by_custodian_with_limit + ).side_effect = DocumentReviewException(500, LambdaError.DocumentReviewDB) + + with pytest.raises(DocumentReviewException) as e: + search_document_review_service.get_review_document_references( + TEST_CURRENT_GP_ODS, TEST_QUERY_LIMIT + ) + assert e.value.status_code == 500 + assert e.value.error == LambdaError.DocumentReviewDB + + +def test_decode_start_key(search_document_review_service): + encoded_start_key = TEST_ENCODED_START_KEY + + actual = search_document_review_service.decode_start_key(encoded_start_key) + assert actual == TEST_LAST_EVALUATED_KEY + + +def 
test_encode_start_key(search_document_review_service): + actual = search_document_review_service.encode_start_key(TEST_LAST_EVALUATED_KEY) + assert actual == TEST_ENCODED_START_KEY diff --git a/lambdas/utils/exceptions.py b/lambdas/utils/exceptions.py index 7f6d3171b..03a6faf66 100644 --- a/lambdas/utils/exceptions.py +++ b/lambdas/utils/exceptions.py @@ -159,9 +159,11 @@ class InvalidFileNameException(Exception): class MetadataPreprocessingException(Exception): pass + class FhirDocumentReferenceException(Exception): pass + class TransactionConflictException(Exception): pass diff --git a/lambdas/utils/lambda_exceptions.py b/lambdas/utils/lambda_exceptions.py index 562f0e298..b53ee4c7f 100644 --- a/lambdas/utils/lambda_exceptions.py +++ b/lambdas/utils/lambda_exceptions.py @@ -109,6 +109,9 @@ class UpdateFhirDocumentReferenceException(LambdaException): pass -class GetDocumentReviewException(LambdaException): +class DocumentReviewException(LambdaException): pass + +class GetDocumentReviewException(LambdaException): + pass