diff --git a/azure/templates/post-deploy.yml b/azure/templates/post-deploy.yml index 5b6a17c79..4f2f44b10 100644 --- a/azure/templates/post-deploy.yml +++ b/azure/templates/post-deploy.yml @@ -225,7 +225,6 @@ steps: displayName: Run full batch test suite workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch" - condition: eq(1, 2) # Disable task but make this step visible in the pipeline - task: PublishTestResults@2 displayName: 'Publish test results' diff --git a/e2e_batch/Makefile b/e2e_batch/Makefile index f23865d67..eb97587d0 100644 --- a/e2e_batch/Makefile +++ b/e2e_batch/Makefile @@ -1,4 +1,4 @@ -include .env -run-immunization-batch: - ENVIRONMENT=$(environment) poetry run python -m unittest -v -c +test: + ENVIRONMENT=$(ENVIRONMENT) poetry run python -m unittest -v -c \ No newline at end of file diff --git a/e2e_batch/README.md b/e2e_batch/README.md index e69de29bb..47047b543 100644 --- a/e2e_batch/README.md +++ b/e2e_batch/README.md @@ -0,0 +1,38 @@ +# End-to-End Batch Test Suite (test_e2e_batch.py) + +This test suite provides automated end-to-end (E2E) testing for the Immunisation FHIR API batch processing pipeline. It verifies that batch file submissions are correctly processed, acknowledged, and validated across the system. + +## Overview +- Framework: Python `unittest` +- Purpose: Simulate real-world batch file submissions, poll for acknowledgements, and validate processing results. +- Test Scenarios: Defined in the scenarios module and enabled in `setUp()`. +- Key Features: +  - Uploads test batch files to S3. +  - Waits for and validates ACK (acknowledgement) files. +  - Cleans up SQS queues and test artifacts after each run. + +## Test Flow +1. Setup (`setUp`) +- Loads and enables a set of test scenarios. +- Prepares test data for batch submission. +2. Test Execution (`test_batch_submission`) +- Uploads ALL enabled test files to S3. +- Polls for ALL ACK responses and forwarded files. +- Validates the content and structure of ACK files. +3. Teardown (`tearDown`) +- Cleans up SQS queues and any generated test files. + +## Key Functions +- `send_files(tests)`: Uploads enabled test files to the S3 input bucket. +- `poll_for_responses(tests, max_timeout)`: Polls for ACKs and processed files, with a timeout. +- `validate_responses(tests)`: Validates the content of ACK files and checks for expected outcomes. + +## How to Run +1. Ensure all dependencies and environment variables are set (see project root README). +2. Update the `.env` file with the contents indicated in `pr-NNN.env`, modified for your PR +3. Update `.env` with a reference to the appropriate AWS config profile: `AWS_PROFILE={your-aws-profile}` +4. Update the Apigee app to match the required PR-NNN +5. 
Run tests from vscode debugger or from makefile using +``` +make test +``` \ No newline at end of file diff --git a/e2e_batch/clients.py b/e2e_batch/clients.py index 404070694..474b5293a 100644 --- a/e2e_batch/clients.py +++ b/e2e_batch/clients.py @@ -3,7 +3,10 @@ """ import logging -from constants import (environment, REGION) +from constants import ( + environment, REGION, + batch_fifo_queue_name, ack_metadata_queue_name, audit_table_name + ) from boto3 import client as boto3_client, resource as boto3_resource @@ -13,8 +16,12 @@ s3_client = boto3_client("s3", region_name=REGION) dynamodb = boto3_resource("dynamodb", region_name=REGION) -table_name = f"imms-{environment}-imms-events" -table = dynamodb.Table(table_name) +sqs_client = boto3_client('sqs', region_name=REGION) +events_table_name = f"imms-{environment}-imms-events" +events_table = dynamodb.Table(events_table_name) +audit_table = dynamodb.Table(audit_table_name) +batch_fifo_queue_url = sqs_client.get_queue_url(QueueName=batch_fifo_queue_name)['QueueUrl'] +ack_metadata_queue_url = sqs_client.get_queue_url(QueueName=ack_metadata_queue_name)['QueueUrl'] # Logger logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/e2e_batch/constants.py b/e2e_batch/constants.py index 6f3f7cc4e..c522618b3 100644 --- a/e2e_batch/constants.py +++ b/e2e_batch/constants.py @@ -1,5 +1,4 @@ import os -from datetime import datetime, timezone environment = os.environ.get("ENVIRONMENT", "internal-dev") REGION = "eu-west-2" @@ -12,77 +11,87 @@ POST_VALIDATION_ERROR = "Validation errors: contained[?(@.resourceType=='Patient')].name[0].given is a mandatory field" DUPLICATE = "The provided identifier:" ACK_PREFIX = "ack/" +TEMP_ACK_PREFIX = "TempAck/" HEADER_RESPONSE_CODE_COLUMN = "HEADER_RESPONSE_CODE" FILE_NAME_VAL_ERROR = "Infrastructure Level Response Value - Processing Error" CONFIG_BUCKET = "imms-internal-dev-supplier-config" PERMISSIONS_CONFIG_FILE_KEY = "permissions_config.json" +RAVS_URI = "https://www.ravs.england.nhs.uk/" +batch_fifo_queue_name = f"imms-{environment}-batch-file-created-queue.fifo" +ack_metadata_queue_name = f"imms-{environment}-ack-metadata-queue.fifo" +audit_table_name = f"immunisation-batch-{environment}-audit-table" -def create_row(unique_id, fore_name, dose_amount, action_flag, header): - """Helper function to create a single row with the specified UNIQUE_ID and ACTION_FLAG.""" - - return { - header: "9732928395", - "PERSON_FORENAME": fore_name, - "PERSON_SURNAME": "James", - "PERSON_DOB": "20080217", - "PERSON_GENDER_CODE": "0", - "PERSON_POSTCODE": "WD25 0DZ", - "DATE_AND_TIME": datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S"), - "SITE_CODE": "RVVKC", - "SITE_CODE_TYPE_URI": "https://fhir.nhs.uk/Id/ods-organization-code", - "UNIQUE_ID": unique_id, - "UNIQUE_ID_URI": "https://www.ravs.england.nhs.uk/", - "ACTION_FLAG": action_flag, - "PERFORMING_PROFESSIONAL_FORENAME": "PHYLIS", - "PERFORMING_PROFESSIONAL_SURNAME": "James", - "RECORDED_DATE": datetime.now(timezone.utc).strftime("%Y%m%d"), - "PRIMARY_SOURCE": "TRUE", - "VACCINATION_PROCEDURE_CODE": "956951000000104", - "VACCINATION_PROCEDURE_TERM": "RSV vaccination in pregnancy (procedure)", - "DOSE_SEQUENCE": "1", - "VACCINE_PRODUCT_CODE": "42223111000001107", - "VACCINE_PRODUCT_TERM": "Quadrivalent influenza vaccine (Sanofi Pasteur)", - "VACCINE_MANUFACTURER": "Sanofi Pasteur", - "BATCH_NUMBER": "BN92478105653", - "EXPIRY_DATE": "20240915", - "SITE_OF_VACCINATION_CODE": "368209003", - "SITE_OF_VACCINATION_TERM": "Right arm", - 
"ROUTE_OF_VACCINATION_CODE": "1210999013", - "ROUTE_OF_VACCINATION_TERM": "Intradermal use", - "DOSE_AMOUNT": dose_amount, - "DOSE_UNIT_CODE": "2622896019", - "DOSE_UNIT_TERM": "Inhalation - unit of product usage", - "INDICATION_CODE": "1037351000000105", - "LOCATION_CODE": "RJC02", - "LOCATION_CODE_TYPE_URI": "https://fhir.nhs.uk/Id/ods-organization-code", - } - - -def create_permissions_json(value): - return { - "all_permissions": { - "DPSFULL": ["RSV_FULL", "COVID19_FULL", "FLU_FULL", "MMR_FULL"], - "DPSREDUCED": ["COVID19_FULL", "FLU_FULL", "MMR_FULL"], - "EMIS": [value, "RSV_FULL"], - "PINNACLE": ["COVID19_UPDATE", "RSV_FULL"], - "SONAR": "", - "TPP": [""], - "AGEM-NIVS": [""], - "NIMS": [""], - "EVA": [""], - "RAVS": [""], - "MEDICAL_DIRECTOR": [""], - "WELSH_DA_1": [""], - "WELSH_DA_2": [""], - "NORTHERN_IRELAND_DA": [""], - "SCOTLAND_DA": [""], - "COVID19_VACCINE_RESOLUTION_SERVICEDESK": [""], - }, - "definitions:": { - "FULL": "Full permissions to create, update and delete a batch record", - "CREATE": "Permission to create a batch record", - "UPDATE": "Permission to update a batch record", - "DELETE": "Permission to delete a batch record", - }, - } +class EventName: + CREATE = "INSERT" + UPDATE = "MODIFY" + DELETE_LOGICAL = "MODIFY" + DELETE_PHYSICAL = "REMOVE" + + +class Operation: + CREATE = "CREATE" + UPDATE = "UPDATE" + DELETE_LOGICAL = "DELETE" + DELETE_PHYSICAL = "REMOVE" + + +class ActionFlag: + CREATE = "NEW" + UPDATE = "UPDATE" + DELETE_LOGICAL = "DELETE" + NONE = "NONE" + + +class InfResult: + SUCCESS = "Success" + PARTIAL_SUCCESS = "Partial Success" + FATAL_ERROR = "Fatal Error" + + +class BusRowResult: + SUCCESS = "OK" + FATAL_ERROR = "Fatal Error" + IMMS_NOT_FOUND = "Immunization resource does not exist" + NONE = "NONE" + + +class OperationOutcome: + IMMS_NOT_FOUND = "Immunization resource does not exist" + TEST = "TEST" + + +class OpMsgs: + VALIDATION_ERROR = "Validation errors" + MISSING_MANDATORY_FIELD = "is a mandatory field" + DOSE_QUANTITY_NOT_NUMBER = "doseQuantity.value must be a number" + IMM_NOT_EXIST = "Immunization resource does not exist" + IDENTIFIER_PROVIDED = "The provided identifier:" + INVALID_DATE_FORMAT = "is not in the correct format" + + +class DestinationType: + INF = ACK_PREFIX + BUS = FORWARDEDFILE_PREFIX + + +class ActionSequence: + def __init__(self, desc: str, actions: list[ActionFlag], outcome: ActionFlag = None): + self.actions = actions + self.description = desc + self.outcome = outcome if outcome else actions[-1] + + +class PermPair: + def __init__(self, ods_code: str, permissions: str): + self.ods_code = ods_code + self.permissions = permissions + + +class TestSet: + CREATE_OK = ActionSequence("Create. OK", [ActionFlag.CREATE]) + UPDATE_OK = ActionSequence("Update. OK", [ActionFlag.CREATE, ActionFlag.UPDATE]) + DELETE_OK = ActionSequence("Delete. OK", [ActionFlag.CREATE, ActionFlag.UPDATE, ActionFlag.DELETE_LOGICAL]) + REINSTATE_OK = ActionSequence("Reinstate. OK", [ActionFlag.CREATE, ActionFlag.DELETE_LOGICAL, ActionFlag.UPDATE]) + DELETE_FAIL = ActionSequence("Delete without Create. Fail", [ActionFlag.DELETE_LOGICAL]) + UPDATE_FAIL = ActionSequence("Update without Create. Fail", [ActionFlag.UPDATE], outcome=ActionFlag.NONE) diff --git a/e2e_batch/poetry.lock b/e2e_batch/poetry.lock index ddb8e18f0..f2cfcc06a 100644 --- a/e2e_batch/poetry.lock +++ b/e2e_batch/poetry.lock @@ -1,19 +1,19 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "boto3" -version = "1.40.28" +version = "1.40.38" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.40.28-py3-none-any.whl", hash = "sha256:fd5cb71b6390e870974e56969e10868f1cf391babeef0b18f91cf8d4f00557cd"}, - {file = "boto3-1.40.28.tar.gz", hash = "sha256:dd44710ab908b0b38cf127053cac83608a15358c85fa267a498e3dbac6fd5789"}, + {file = "boto3-1.40.38-py3-none-any.whl", hash = "sha256:fac337b4f0615e4d6ceee44686e662f51d8e57916ed2bc763468e3e8c611a658"}, + {file = "boto3-1.40.38.tar.gz", hash = "sha256:932ebdd8dbf8ab5694d233df86d5d0950291e0b146c27cb46da8adb4f00f6ca4"}, ] [package.dependencies] -botocore = ">=1.40.28,<1.41.0" +botocore = ">=1.40.38,<1.41.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.14.0,<0.15.0" @@ -22,14 +22,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.40.28" +version = "1.40.38" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.40.28-py3-none-any.whl", hash = "sha256:fcd393da6cb4d97cff3823d4085cd034d1c80f1cc22a57b1f84d3f863b337a03"}, - {file = "botocore-1.40.28.tar.gz", hash = "sha256:4a26c662dcce2e675209c23cd3a569e137a59fdc9692b8bb9dabed522cbe2d8c"}, + {file = "botocore-1.40.38-py3-none-any.whl", hash = "sha256:7d60a7557db3a58f9394e7ecec1f6b87495ce947eb713f29d53aee83a6e9dc71"}, + {file = "botocore-1.40.38.tar.gz", hash = "sha256:18039009e1eca2bff12e576e8dd3c80cd9b312294f1469c831de03169582ad59"}, ] [package.dependencies] @@ -54,63 +54,67 @@ files = [ [[package]] name = "numpy" -version = "2.3.1" +version = "2.2.3" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.11" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "numpy-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ea9e48336a402551f52cd8f593343699003d2353daa4b72ce8d34f66b722070"}, - {file = "numpy-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ccb7336eaf0e77c1635b232c141846493a588ec9ea777a7c24d7166bb8533ae"}, - {file = "numpy-2.3.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0bb3a4a61e1d327e035275d2a993c96fa786e4913aa089843e6a2d9dd205c66a"}, - {file = "numpy-2.3.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:e344eb79dab01f1e838ebb67aab09965fb271d6da6b00adda26328ac27d4a66e"}, - {file = "numpy-2.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:467db865b392168ceb1ef1ffa6f5a86e62468c43e0cfb4ab6da667ede10e58db"}, - {file = "numpy-2.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:afed2ce4a84f6b0fc6c1ce734ff368cbf5a5e24e8954a338f3bdffa0718adffb"}, - {file = "numpy-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0025048b3c1557a20bc80d06fdeb8cc7fc193721484cca82b2cfa072fec71a93"}, - {file = "numpy-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5ee121b60aa509679b682819c602579e1df14a5b07fe95671c8849aad8f2115"}, - {file = "numpy-2.3.1-cp311-cp311-win32.whl", hash = "sha256:a8b740f5579ae4585831b3cf0e3b0425c667274f82a484866d2adf9570539369"}, - {file = "numpy-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4580adadc53311b163444f877e0789f1c8861e2698f6b2a4ca852fda154f3ff"}, - {file = "numpy-2.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:ec0bdafa906f95adc9a0c6f26a4871fa753f25caaa0e032578a30457bff0af6a"}, - {file = 
"numpy-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2959d8f268f3d8ee402b04a9ec4bb7604555aeacf78b360dc4ec27f1d508177d"}, - {file = "numpy-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:762e0c0c6b56bdedfef9a8e1d4538556438288c4276901ea008ae44091954e29"}, - {file = "numpy-2.3.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:867ef172a0976aaa1f1d1b63cf2090de8b636a7674607d514505fb7276ab08fc"}, - {file = "numpy-2.3.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:4e602e1b8682c2b833af89ba641ad4176053aaa50f5cacda1a27004352dde943"}, - {file = "numpy-2.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8e333040d069eba1652fb08962ec5b76af7f2c7bce1df7e1418c8055cf776f25"}, - {file = "numpy-2.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e7cbf5a5eafd8d230a3ce356d892512185230e4781a361229bd902ff403bc660"}, - {file = "numpy-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1b8f26d1086835f442286c1d9b64bb3974b0b1e41bb105358fd07d20872952"}, - {file = "numpy-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee8340cb48c9b7a5899d1149eece41ca535513a9698098edbade2a8e7a84da77"}, - {file = "numpy-2.3.1-cp312-cp312-win32.whl", hash = "sha256:e772dda20a6002ef7061713dc1e2585bc1b534e7909b2030b5a46dae8ff077ab"}, - {file = "numpy-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cfecc7822543abdea6de08758091da655ea2210b8ffa1faf116b940693d3df76"}, - {file = "numpy-2.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:7be91b2239af2658653c5bb6f1b8bccafaf08226a258caf78ce44710a0160d30"}, - {file = "numpy-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25a1992b0a3fdcdaec9f552ef10d8103186f5397ab45e2d25f8ac51b1a6b97e8"}, - {file = "numpy-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dea630156d39b02a63c18f508f85010230409db5b2927ba59c8ba4ab3e8272e"}, - {file = "numpy-2.3.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bada6058dd886061f10ea15f230ccf7dfff40572e99fef440a4a857c8728c9c0"}, - {file = "numpy-2.3.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:a894f3816eb17b29e4783e5873f92faf55b710c2519e5c351767c51f79d8526d"}, - {file = "numpy-2.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:18703df6c4a4fee55fd3d6e5a253d01c5d33a295409b03fda0c86b3ca2ff41a1"}, - {file = "numpy-2.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5902660491bd7a48b2ec16c23ccb9124b8abfd9583c5fdfa123fe6b421e03de1"}, - {file = "numpy-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:36890eb9e9d2081137bd78d29050ba63b8dab95dff7912eadf1185e80074b2a0"}, - {file = "numpy-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a780033466159c2270531e2b8ac063704592a0bc62ec4a1b991c7c40705eb0e8"}, - {file = "numpy-2.3.1-cp313-cp313-win32.whl", hash = "sha256:39bff12c076812595c3a306f22bfe49919c5513aa1e0e70fac756a0be7c2a2b8"}, - {file = "numpy-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d5ee6eec45f08ce507a6570e06f2f879b374a552087a4179ea7838edbcbfa42"}, - {file = "numpy-2.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:0c4d9e0a8368db90f93bd192bfa771ace63137c3488d198ee21dfb8e7771916e"}, - {file = "numpy-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b0b5397374f32ec0649dd98c652a1798192042e715df918c20672c62fb52d4b8"}, - {file = "numpy-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c5bdf2015ccfcee8253fb8be695516ac4457c743473a43290fd36eba6a1777eb"}, - {file = "numpy-2.3.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d70f20df7f08b90a2062c1f07737dd340adccf2068d0f1b9b3d56e2038979fee"}, - {file = 
"numpy-2.3.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:2fb86b7e58f9ac50e1e9dd1290154107e47d1eef23a0ae9145ded06ea606f992"}, - {file = "numpy-2.3.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:23ab05b2d241f76cb883ce8b9a93a680752fbfcbd51c50eff0b88b979e471d8c"}, - {file = "numpy-2.3.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ce2ce9e5de4703a673e705183f64fd5da5bf36e7beddcb63a25ee2286e71ca48"}, - {file = "numpy-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c4913079974eeb5c16ccfd2b1f09354b8fed7e0d6f2cab933104a09a6419b1ee"}, - {file = "numpy-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:010ce9b4f00d5c036053ca684c77441f2f2c934fd23bee058b4d6f196efd8280"}, - {file = "numpy-2.3.1-cp313-cp313t-win32.whl", hash = "sha256:6269b9edfe32912584ec496d91b00b6d34282ca1d07eb10e82dfc780907d6c2e"}, - {file = "numpy-2.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2a809637460e88a113e186e87f228d74ae2852a2e0c44de275263376f17b5bdc"}, - {file = "numpy-2.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:eccb9a159db9aed60800187bc47a6d3451553f0e1b08b068d8b277ddfbb9b244"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ad506d4b09e684394c42c966ec1527f6ebc25da7f4da4b1b056606ffe446b8a3"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:ebb8603d45bc86bbd5edb0d63e52c5fd9e7945d3a503b77e486bd88dde67a19b"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:15aa4c392ac396e2ad3d0a2680c0f0dee420f9fed14eef09bdb9450ee6dcb7b7"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c6e0bf9d1a2f50d2b65a7cf56db37c095af17b59f6c132396f7c6d5dd76484df"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eabd7e8740d494ce2b4ea0ff05afa1b7b291e978c0ae075487c51e8bd93c0c68"}, - {file = "numpy-2.3.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e610832418a2bc09d974cc9fecebfa51e9532d6190223bc5ef6a7402ebf3b5cb"}, - {file = "numpy-2.3.1.tar.gz", hash = "sha256:1ec9ae20a4226da374362cca3c62cd753faf2f951440b0e3b98e93c235441d2b"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e37242f5324ffd9f7ba5acf96d774f9276aa62a966c0bad8dae692deebec7716"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95172a21038c9b423e68be78fd0be6e1b97674cde269b76fe269a5dfa6fadf0b"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b47c440210c5d1d67e1cf434124e0b5c395eee1f5806fdd89b553ed1acd0a3"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0391ea3622f5c51a2e29708877d56e3d276827ac5447d7f45e9bc4ade8923c52"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6b3dfc7661f8842babd8ea07e9897fe3d9b69a1d7e5fbb743e4160f9387833b"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ad78ce7f18ce4e7df1b2ea4019b5817a2f6a8a16e34ff2775f646adce0a5027"}, + {file = "numpy-2.2.3-cp310-cp310-win32.whl", hash = "sha256:5ebeb7ef54a7be11044c33a17b2624abe4307a75893c001a4800857956b41094"}, + {file = "numpy-2.2.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:596140185c7fa113563c67c2e894eabe0daea18cf8e33851738c19f70ce86aeb"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16372619ee728ed67a2a606a614f56d3eabc5b86f8b615c79d01957062826ca8"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5521a06a3148686d9269c53b09f7d399a5725c47bbb5b35747e1cb76326b714b"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7c8dde0ca2f77828815fd1aedfdf52e59071a5bae30dac3b4da2a335c672149a"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:77974aba6c1bc26e3c205c2214f0d5b4305bdc719268b93e768ddb17e3fdd636"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d42f9c36d06440e34226e8bd65ff065ca0963aeecada587b937011efa02cdc9d"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2712c5179f40af9ddc8f6727f2bd910ea0eb50206daea75f58ddd9fa3f715bb"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c8b0451d2ec95010d1db8ca733afc41f659f425b7f608af569711097fd6014e2"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9b4a8148c57ecac25a16b0e11798cbe88edf5237b0df99973687dd866f05e1b"}, + {file = "numpy-2.2.3-cp311-cp311-win32.whl", hash = "sha256:1f45315b2dc58d8a3e7754fe4e38b6fce132dab284a92851e41b2b344f6441c5"}, + {file = "numpy-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f48ba6f6c13e5e49f3d3efb1b51c8193215c42ac82610a04624906a9270be6f"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8"}, + {file = "numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe"}, + {file = "numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d"}, + {file = "numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693"}, + {file = "numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0"}, + {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610"}, + {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf"}, + {file = "numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef"}, + {file = "numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3c2ec8a0f51d60f1e9c0c5ab116b7fc104b165ada3f6c58abf881cb2eb16044d"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ed2cf9ed4e8ebc3b754d398cba12f24359f018b416c380f577bbae112ca52fc9"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39261798d208c3095ae4f7bc8eaeb3481ea8c6e03dc48028057d3cbdbdb8937e"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:783145835458e60fa97afac25d511d00a1eca94d4a8f3ace9fe2043003c678e4"}, + {file = "numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020"}, ] [[package]] @@ -213,14 +217,14 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2025.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - 
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -255,33 +259,32 @@ files = [ [[package]] name = "tzdata" -version = "2025.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" groups = ["main"] files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] name = "urllib3" -version = "2.5.0" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.9" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" groups = ["main"] files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [metadata] lock-version = "2.1" diff --git a/e2e_batch/pr-NNN.env b/e2e_batch/pr-NNN.env new file mode 100644 index 000000000..1073ea008 --- /dev/null +++ b/e2e_batch/pr-NNN.env @@ -0,0 +1,9 @@ +ENVIRONMENT=pr-NNN +REGION=eu-west-2 +PROXY_NAME=immunisation-fhir-api-pr-NNN +APIGEE_ENVIRONMENT=internal-dev +SERVICE_BASE_PATH=immunisation-fhir-api-pr-NNN +AUDIT_TABLE_NAME=immunisation-batch-pr-NNN-audit-table +PR_NUMBER=NNN + +AWS_PROFILE={your-aws-profile} \ No newline at end of file diff --git a/e2e_batch/scenarios.py b/e2e_batch/scenarios.py new file mode 100644 index 000000000..cd70b036d --- /dev/null +++ b/e2e_batch/scenarios.py @@ -0,0 +1,211 @@ +import pandas as pd +from datetime import datetime, timezone +from vax_suppliers import TestPair, OdsVax +from constants import ( 
+ ActionFlag, BusRowResult, DestinationType, Operation, + ACK_BUCKET, + RAVS_URI, + OperationOutcome +) +from utils import ( + poll_s3_file_pattern, fetch_pk_and_operation_from_dynamodb, + validate_fatal_error, + get_file_content_from_s3, + aws_cleanup, + create_row, +) +from clients import logger +from errors import DynamoDBMismatchError +import uuid +import csv + + +class TestAction: + def __init__(self, action: ActionFlag, + expected_header_response_code=BusRowResult.SUCCESS, + expected_operation_outcome=''): + self.action = action + self.expected_header_response_code = expected_header_response_code + self.expected_operation_outcome = expected_operation_outcome + + +class TestCase: + def __init__(self, scenario: dict): + self.name: str = scenario.get("name", "Unnamed Test Case") + self.description: str = scenario.get("description", "") + self.ods_vax: OdsVax = scenario.get("ods_vax") + self.actions: list[TestAction] = scenario.get("actions", []) + self.ods = self.ods_vax.ods_code + self.vax = self.ods_vax.vax + self.dose_amount: float = scenario.get("dose_amount", 0.5) + self.inject_cp1252 = scenario.get("create_with_cp1252_encoded_character", False) + self.header = scenario.get("header", "NHS_NUMBER") + self.version = scenario.get("version", 5) + self.operation_outcome = scenario.get("operation_outcome", "") + self.enabled = scenario.get("enabled", False) + self.ack_keys = {DestinationType.INF: None, DestinationType.BUS: None} + # initialise attribs to be set later + self.key = None # S3 key of the uploaded file + self.file_name = None # name of the generated CSV file + self.identifier = None # unique identifier of subject in the CSV file rows + + def get_poll_destinations(self, pending: bool) -> bool: + # loop through keys in test (inf and bus) + for ack_key in self.ack_keys.keys(): + if not self.ack_keys[ack_key]: + found_ack_key = self.poll_destination(ack_key) + if found_ack_key: + self.ack_keys[ack_key] = found_ack_key + else: + pending = True + return pending + + def poll_destination(self, ack_prefix: DestinationType): + """Poll the ACK_BUCKET for an ack file that contains the input_file_name as a substring.""" + input_file_name = self.file_name + filename_without_ext = input_file_name[:-4] if input_file_name.endswith(".csv") else input_file_name + search_pattern = f"{ack_prefix}{filename_without_ext}" + return poll_s3_file_pattern(ack_prefix, search_pattern) + + def check_final_success_action(self): + desc = f"{self.name} - outcome" + outcome = self.operation_outcome + dynamo_pk, operation, is_reinstate = fetch_pk_and_operation_from_dynamodb(self.get_identifier_pk()) + + expected_operation = Operation.CREATE if outcome == ActionFlag.CREATE else outcome + if operation != expected_operation: + raise DynamoDBMismatchError( + ( + f"{desc}. Final Event Table Operation: Mismatch - DynamoDB Operation '{operation}' " + f"does not match operation requested '{outcome}' (3)" + ) + ) + + def get_identifier_pk(self): + if not self.identifier: + raise Exception("Identifier not set. 
Generate the CSV file first.") + return f"{RAVS_URI}#{self.identifier}" + + def check_bus_file_content(self): + desc = f"{self.name} - bus" + content = get_file_content_from_s3(ACK_BUCKET, self.ack_keys[DestinationType.BUS]) + reader = csv.DictReader(content.splitlines(), delimiter="|") + rows = list(reader) + + for i, row in enumerate(rows): + response_code = self.actions[i].expected_header_response_code + operation_outcome = self.actions[i].expected_operation_outcome + if response_code and "HEADER_RESPONSE_CODE" in row: + row_HEADER_RESPONSE_CODE = row["HEADER_RESPONSE_CODE"].strip() + assert row_HEADER_RESPONSE_CODE == response_code, ( + f"{desc}.Row {i} expected HEADER_RESPONSE_CODE '{response_code}', " + f"but got '{row_HEADER_RESPONSE_CODE}'" + ) + if operation_outcome and "OPERATION_OUTCOME" in row: + row_OPERATION_OUTCOME = row["OPERATION_OUTCOME"].strip() + assert row_OPERATION_OUTCOME.startswith(operation_outcome), ( + f"{desc}.Row {i} expected OPERATION_OUTCOME '{operation_outcome}', " + f"but got '{row_OPERATION_OUTCOME}'" + ) + elif row_HEADER_RESPONSE_CODE == "Fatal Error": + validate_fatal_error(desc, row, i, operation_outcome) + + def generate_csv_file(self): + + self.file_name = self.get_file_name(self.vax, self.ods, self.version) + logger.info(f"Test \"{self.name}\" File {self.file_name}") + data = [] + self.identifier = str(uuid.uuid4()) + for action in self.actions: + row = create_row(self.identifier, self.dose_amount, action.action, self.header, self.inject_cp1252) + logger.info(f" > {action.action} - {self.vax}/{self.ods} - {self.identifier}") + data.append(row) + df = pd.DataFrame(data) + + df.to_csv(self.file_name, index=False, sep="|", quoting=csv.QUOTE_MINIMAL) + + def get_file_name(self, vax_type, ods, version="5"): + timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S00") + return f"{vax_type}_Vaccinations_v{version}_{ods}_{timestamp}.csv" + + def cleanup(self): + aws_cleanup(self.key, self.identifier, self.ack_keys) + + +def create_test_cases(test_case_dict: dict) -> list[TestCase]: + """Initialize test cases from a dictionary.""" + return [TestCase(name) for name in test_case_dict] + + +def enable_tests(test_cases: list[TestCase], names: list[str]) -> None: + """Enable only the test cases with the given names.""" + for name in names: + for test in test_cases: + if test.name == name: + test.enabled = True + break + else: + raise Exception(f"Test case with name '{name}' not found.") + + +def generate_csv_files(test_cases: list[TestCase]) -> list[TestCase]: + """Generate CSV files for all enabled test cases.""" + ret = [] + for test in test_cases: + if test.enabled: + test.generate_csv_file() + ret.append(test) + + +scenarios = { + "dev": [ + { + "name": "Successful Create", + "ods_vax": TestPair.E8HA94_COVID19_CUD, + "operation_outcome": ActionFlag.CREATE, + "actions": [TestAction(ActionFlag.CREATE)], + "description": "Successful Create" + }, + { + "name": "Successful Update", + "description": "Successful Create,Update", + "ods_vax": TestPair.DPSFULL_COVID19_CRUDS, + "operation_outcome": ActionFlag.UPDATE, + "actions": [TestAction(ActionFlag.CREATE), TestAction(ActionFlag.UPDATE)] + }, + { + "name": "Successful Delete", + "description": "Successful Create,Update, Delete", + "ods_vax": TestPair.V0V8L_FLU_CRUDS, + "operation_outcome": ActionFlag.DELETE_LOGICAL, + "actions": [TestAction(ActionFlag.CREATE), TestAction(ActionFlag.DELETE_LOGICAL)] + }, + { + "name": "Failed Update", + "description": "Failed Update - resource does not exist", + "ods_vax": 
TestPair.V0V8L_3IN1_CRUDS, + "actions": [TestAction(ActionFlag.UPDATE, + expected_header_response_code=BusRowResult.FATAL_ERROR, + expected_operation_outcome=OperationOutcome.IMMS_NOT_FOUND)], + "operation_outcome": ActionFlag.NONE + }, + { + "name": "Failed Delete", + "description": "Failed Delete - resource does not exist", + "ods_vax": TestPair.X26_MMR_CRUDS, + "actions": [TestAction(ActionFlag.DELETE_LOGICAL, + expected_header_response_code=BusRowResult.FATAL_ERROR, + expected_operation_outcome=OperationOutcome.IMMS_NOT_FOUND)], + "operation_outcome": ActionFlag.NONE + }, + { + "name": "Create with 1252 char", + "description": "Create with 1252 char", + "ods_vax": TestPair.YGA_MENACWY_CRUDS, + "operation_outcome": ActionFlag.CREATE, + "actions": [TestAction(ActionFlag.CREATE)], + "create_with_cp1252_encoded_character": True + } + ], + "ref": [] + } diff --git a/e2e_batch/test_e2e_batch.py b/e2e_batch/test_e2e_batch.py index ff7447237..6147a375a 100644 --- a/e2e_batch/test_e2e_batch.py +++ b/e2e_batch/test_e2e_batch.py @@ -1,224 +1,129 @@ import time import unittest from utils import ( - generate_csv, upload_file_to_s3, get_file_content_from_s3, - wait_for_ack_file, check_ack_file_content, validate_row_count, - upload_config_file, - generate_csv_with_ordered_100000_rows, - verify_final_ack_file, + purge_sqs_queues, delete_file_from_s3 ) +from clients import logger +from scenarios import scenarios, TestCase, create_test_cases, enable_tests, generate_csv_files + from constants import ( SOURCE_BUCKET, INPUT_PREFIX, ACK_BUCKET, - PRE_VALIDATION_ERROR, - POST_VALIDATION_ERROR, - DUPLICATE, - FILE_NAME_VAL_ERROR, - environment + environment, + DestinationType, + TEMP_ACK_PREFIX ) class TestE2EBatch(unittest.TestCase): def setUp(self): - self.uploaded_files = [] # Tracks uploaded input keys - self.ack_files = [] # Tracks ack keys + self.tests: list[TestCase] = create_test_cases(scenarios["dev"]) + enable_tests(self.tests, [ + "Successful Create", + "Successful Update", + "Successful Delete", + "Create with 1252 char", + "Failed Update", + "Failed Delete", + ]) + generate_csv_files(self.tests) def tearDown(self): - for file_key in self.uploaded_files: - delete_file_from_s3(SOURCE_BUCKET, file_key) - for ack_key in self.ack_files: - delete_file_from_s3(ACK_BUCKET, ack_key) - - if environment != "ref": - def test_create_success(self): - """Test CREATE scenario.""" - input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "OK", None, "CREATE") - - def test_duplicate_create(self): - """Test DUPLICATE scenario.""" - - input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True) - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE") - - def test_update_success(self): - """Test UPDATE scenario.""" - input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - 
self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "OK", None, "UPDATE") - - def test_reinstated_success(self): - """Test REINSTATED scenario.""" - input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "OK", None, "reinstated") - - def test_update_reinstated_success(self): - """Test UPDATE-REINSTATED scenario.""" - input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "OK", None, "update-reinstated") - - def test_delete_success(self): - """Test DELETE scenario.""" - input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "OK", None, "DELETE") - - def test_pre_validation_error(self): - """Test PRE-VALIDATION error scenario.""" - input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - validate_row_count(input_file, ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None) - - def test_post_validation_error(self): - """Test POST-VALIDATION error scenario.""" - input_file = generate_csv("", "0.3", action_flag="CREATE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(None, input_file) - self.ack_files.append(ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None) - - def test_file_name_validation_error(self): - """Test FILE-NAME-VALIDATION error scenario.""" - input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True) - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(True, input_file) - self.ack_files.append(ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None) - - def test_header_name_validation_error(self): - """Test HEADER-NAME-VALIDATION error scenario.""" - input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER") - - key = upload_file_to_s3(input_file, 
SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(True, input_file) - self.ack_files.append(ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None) - - # This test updates the permissions_config.json file from the imms-internal-dev-supplier-config - # S3 bucket shared across multiple environments (PR environments, internal-dev, int, and ref). - # Running this may modify permissions in these environments, causing unintended side effects. - @unittest.skip("Modifies shared S3 permissions configuration") - def test_invalid_permission(self): - """Test INVALID-PERMISSION error scenario.""" - upload_config_file("MMR_FULL") # permissions_config.json is updated here - time.sleep(20) - - input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE") - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - ack_key = wait_for_ack_file(True, input_file) - self.ack_files.append(ack_key) - - ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key) - check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None) - - upload_config_file("COVID19_FULL") - time.sleep(20) - - else: - def test_end_to_end_speed_test_with_100000_rows(self): - """Test end_to_end_speed_test_with_100000_rows scenario with full integration""" - input_file = generate_csv_with_ordered_100000_rows(None) - - key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX) - self.uploaded_files.append(key) - - final_ack_key = wait_for_ack_file(None, input_file, timeout=1800) - self.ack_files.append(final_ack_key) - - response = verify_final_ack_file(final_ack_key) - assert response is True - - -if __name__ == "__main__": - unittest.main() + logger.info("Cleanup...") + for test in self.tests: + test.cleanup() + delete_file_from_s3(ACK_BUCKET, TEMP_ACK_PREFIX) + purge_sqs_queues() + + @unittest.skipIf(environment == "ref", "Skip for ref") + def test_batch_submission(self): + """Test all scenarios and submit as batch.""" + start_time = time.time() + max_timeout = 600 # seconds + + send_files(self.tests) + + if not poll_for_responses(self.tests, max_timeout): + logger.error("Timeout waiting for responses") + + validate_responses(self.tests) + + logger.info(f"Tests Completed. Time: {time.time() - start_time:.1f} seconds") + + +def send_files(tests: list[TestCase]): + start_time = time.time() + for test in tests: + if test.enabled: + logger.info(f"Upload {test.file_name} ") + key = upload_file_to_s3(test.file_name, SOURCE_BUCKET, INPUT_PREFIX) + test.key = key + logger.info(f"Files uploaded. Time: {time.time() - start_time:.1f} seconds") + + +def poll_for_responses(tests: list[TestCase], max_timeout=1200) -> bool: + logger.info("Waiting while processing...") + start_time = time.time() + # while there are still pending files, poll for acks and forwarded files + pending = True + while pending: + pending = False + for test in tests: + pending = test.get_poll_destinations(pending) + if pending: + print(".", end="") + time.sleep(5) + if (time.time() - start_time) > max_timeout: + return False + logger.info(f"Files processed. 
Time: {time.time() - start_time:.1f} seconds") + return True + + +def validate_responses(tests: list[TestCase]): + start_time = time.time() + count = 0 + expected_count = len(tests) * 2 + errors = False + try: + for test in tests: + logger.info(f"Validation for Test: {test.name} ") + # Validate the ACK file + if test.ack_keys[DestinationType.INF]: + count += 1 + inf_ack_content = get_file_content_from_s3(ACK_BUCKET, test.ack_keys[DestinationType.INF]) + check_ack_file_content(test.name, inf_ack_content, "Success", None, + test.operation_outcome) + else: + logger.error(f"INF ACK file not found for test: {test.name}") + errors = True + + if test.ack_keys[DestinationType.BUS]: + count += 1 + validate_row_count(f"{test.name} - bus", test.file_name, + test.ack_keys[DestinationType.BUS]) + + test.check_bus_file_content() + + test.check_final_success_action() + else: + logger.error(f"BUS ACK file not found for test: {test.name}") + errors = True + + except Exception as e: + logger.error(f"Error during validation: {e}") + errors = True + finally: + if count == expected_count: + logger.info("All responses subject to validation.") + else: + logger.error(f"{count} of {expected_count} responses subject to validation.") + logger.info(f"Time: {time.time() - start_time:.1f} seconds") + assert count == expected_count, f"Only {count} of {expected_count} responses subject to validation." + assert not errors, "Errors found during validation." diff --git a/e2e_batch/utils.py b/e2e_batch/utils.py index fcd9632be..560b6ed96 100644 --- a/e2e_batch/utils.py +++ b/e2e_batch/utils.py @@ -7,94 +7,28 @@ import io import os from botocore.exceptions import ClientError +from boto3.dynamodb.conditions import Key from io import StringIO from datetime import datetime, timezone -from clients import logger, s3_client, table +from clients import ( + logger, s3_client, audit_table, events_table, sqs_client, + batch_fifo_queue_url, ack_metadata_queue_url +) from errors import AckFileNotFoundError, DynamoDBMismatchError from constants import ( ACK_BUCKET, FORWARDEDFILE_PREFIX, SOURCE_BUCKET, DUPLICATE, - create_row, ACK_PREFIX, FILE_NAME_VAL_ERROR, - CONFIG_BUCKET, - create_permissions_json, - PERMISSIONS_CONFIG_FILE_KEY, - INPUT_PREFIX, HEADER_RESPONSE_CODE_COLUMN, + RAVS_URI, + ActionFlag, + environment ) -def generate_csv(fore_name, dose_amount, action_flag, headers="NHS_NUMBER", same_id=False, file_key=False): - """ - Generate a CSV file with 2 or 3 rows depending on the action_flag. - - - For CREATE: - - Both rows have unique UNIQUE_IDs with "ACTION_FLAG": "NEW". - - If same_id=True, both rows share the same UNIQUE_ID. - - - For UPDATE: - - One row has "ACTION_FLAG": "NEW" and the other "ACTION_FLAG": "UPDATE" with the same UNIQUE_ID. - - - For DELETE: - - One row has "ACTION_FLAG": "NEW" and the other "ACTION_FLAG": "DELETE" with the same UNIQUE_ID. - - - For REINSTATED: - - Three rows are generated with the same UNIQUE_ID: - - The first row has "ACTION_FLAG": "NEW". - - The second row has "ACTION_FLAG": "DELETE". - - The third row has "ACTION_FLAG": "UPDATE". 
- """ - - data = [] - - if action_flag == "CREATE": - if same_id: - - unique_id = str(uuid.uuid4()) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - else: - unique_ids = [str(uuid.uuid4()), str(uuid.uuid4())] - for unique_id in unique_ids: - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - - elif action_flag == "UPDATE": - unique_id = str(uuid.uuid4()) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "UPDATE", headers)) - - elif action_flag == "DELETE": - unique_id = str(uuid.uuid4()) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "DELETE", headers)) - - elif action_flag == "REINSTATED": - unique_id = str(uuid.uuid4()) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "DELETE", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "UPDATE", headers)) - - elif action_flag == "UPDATE-REINSTATED": - unique_id = str(uuid.uuid4()) - data.append(create_row(unique_id, fore_name, dose_amount, "NEW", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "DELETE", headers)) - data.append(create_row(unique_id, fore_name, dose_amount, "UPDATE", headers)) - data.append(create_row(unique_id, "fore_name", dose_amount, "UPDATE", headers)) - - df = pd.DataFrame(data) - timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S%f")[:-3] - file_name = ( - f"COVID19_Vaccinations_v4_YGM41_{timestamp}.csv" - if file_key - else f"COVID19_Vaccinations_v5_YGM41_{timestamp}.csv" - ) - df.to_csv(file_name, index=False, sep="|", quoting=csv.QUOTE_MINIMAL) - return file_name - - def upload_file_to_s3(file_name, bucket, prefix): """Upload the given file to the specified bucket under the provided prefix. Returns the S3 key if successful, or raises an exception.""" @@ -122,14 +56,15 @@ def delete_file_from_s3(bucket, key): """Delete the specified file (object) from the given S3 bucket. 
Returns True if deletion is successful, otherwise raises an exception.""" try: - response = s3_client.delete_object(Bucket=bucket, Key=key) + if key and key.strip(): + response = s3_client.delete_object(Bucket=bucket, Key=key) - # Optionally verify deletion status - status_code = response.get("ResponseMetadata", {}).get("HTTPStatusCode") - if status_code != 204: - raise Exception(f"Delete failed with status code: {status_code}") + # Optionally verify deletion status + status_code = response.get("ResponseMetadata", {}).get("HTTPStatusCode") + if status_code != 204: + raise Exception(f"Delete failed with status code: {status_code}") - print(f"Deleted {key} from bucket {bucket}") + print(f"Deleted {key}") return True except ClientError as e: @@ -138,7 +73,7 @@ def delete_file_from_s3(bucket, key): raise Exception(f"Unexpected error during file deletion: {e}") -def wait_for_ack_file(ack_prefix, input_file_name, timeout=120): +def wait_for_ack_file(ack_prefix, input_file_name, timeout=600): """Poll the ACK_BUCKET for an ack file that contains the input_file_name as a substring.""" filename_without_ext = input_file_name[:-4] if input_file_name.endswith(".csv") else input_file_name @@ -170,7 +105,7 @@ def get_file_content_from_s3(bucket, key): return content -def check_ack_file_content(content, response_code, operation_outcome, operation_requested): +def check_ack_file_content(desc, content, response_code, operation_outcome, operation_requested) -> bool: """ Parse and validate the acknowledgment (ACK) CSV file content. @@ -206,23 +141,23 @@ def check_ack_file_content(content, response_code, operation_outcome, operation_ if operation_outcome and DUPLICATE in operation_outcome: # Handle DUPLICATE scenario: - assert len(rows) == 2, f"Expected 2 rows for DUPLICATE scenario, got {len(rows)}" + assert len(rows) == 2, f"{desc}. 
Expected 2 rows for DUPLICATE scenario, got {len(rows)}" first_row = rows[0] - validate_header_response_code(first_row, 0, "OK") + validate_header_response_code(desc, first_row, 0, "OK") validate_ok_response(first_row, 0, operation_requested) second_row = rows[1] - validate_header_response_code(second_row, 1, "Fatal Error") - validate_fatal_error(second_row, 1, DUPLICATE) + validate_header_response_code(desc, second_row, 1, "Fatal Error") + validate_fatal_error(desc, second_row, 1, DUPLICATE) else: # Handle normal scenarios: for i, row in enumerate(rows): if response_code and "HEADER_RESPONSE_CODE" in row: - assert row["HEADER_RESPONSE_CODE"].strip() == response_code, ( - f"Row {i} expected HEADER_RESPONSE_CODE '{response_code}', " - f"but got '{row['HEADER_RESPONSE_CODE'].strip()}'" - ) + row_HEADER_RESPONSE_CODE = row["HEADER_RESPONSE_CODE"].strip() + assert row_HEADER_RESPONSE_CODE == response_code, ( + f"{desc}.Row {i} expected HEADER_RESPONSE_CODE '{response_code}', " + f"but got '{row_HEADER_RESPONSE_CODE}'") if operation_outcome and "OPERATION_OUTCOME" in row: assert row["OPERATION_OUTCOME"].strip() == operation_outcome, ( f"Row {i} expected OPERATION_OUTCOME '{operation_outcome}', " @@ -234,7 +169,7 @@ def check_ack_file_content(content, response_code, operation_outcome, operation_ validate_fatal_error(row, i, operation_outcome) -def validate_header_response_code(row, index, expected_code): +def validate_header_response_code(desc, row, index, expected_code): """Ensure HEADER_RESPONSE_CODE exists and matches expected response code.""" if "HEADER_RESPONSE_CODE" not in row: @@ -342,24 +277,25 @@ def fetch_pk_and_operation_from_dynamodb(identifier_pk): Logs any exceptions encountered during the DynamoDB query. """ try: - response = table.query( + response = events_table.query( IndexName="IdentifierGSI", KeyConditionExpression="IdentifierPK = :identifier_pk", ExpressionAttributeValues={":identifier_pk": identifier_pk}, ) - if "Items" in response and response["Items"] and "DeletedAt" in response["Items"][0]: - return (response["Items"][0]["PK"], response["Items"][0]["Operation"], response["Items"][0]["DeletedAt"]) - if "Items" in response and response["Items"]: - return (response["Items"][0]["PK"], response["Items"][0]["Operation"], None) - else: - return "NOT_FOUND" + if "Items" in response: + items = response["Items"] + if items: + if "DeletedAt" in items[0]: + return (items[0]["PK"], items[0]["Operation"], items[0]["DeletedAt"]) + return (items[0]["PK"], items[0]["Operation"], None) + return (identifier_pk, ActionFlag.NONE, None) except Exception as e: logger.error(f"Error fetching from DynamoDB: {e}") return "ERROR" -def validate_row_count(source_file_name, ack_file_name): +def validate_row_count(desc, source_file_name, ack_file_name): """ Compare the row count of a file in one S3 bucket with a file in another S3 bucket. Raises: @@ -369,7 +305,7 @@ def validate_row_count(source_file_name, ack_file_name): ack_file_row_count = fetch_row_count(ACK_BUCKET, ack_file_name) assert ( source_file_row_count == ack_file_row_count - ), f"Row count mismatch: Input ({source_file_row_count}) vs Ack ({ack_file_row_count})" + ), f"{desc}. 
Row count mismatch: Input ({source_file_row_count}) vs Ack ({ack_file_row_count})" def fetch_row_count(bucket, file_name): @@ -385,12 +321,6 @@ def save_json_to_file(json_data, filename="permissions_config.json"): json.dump(json_data, json_file, indent=4) -def upload_config_file(value): - input_file = create_permissions_json(value) - save_json_to_file(input_file) - upload_file_to_s3(PERMISSIONS_CONFIG_FILE_KEY, CONFIG_BUCKET, INPUT_PREFIX) - - def generate_csv_with_ordered_100000_rows(file_name=None): """ Generate a CSV where: @@ -465,3 +395,123 @@ def verify_final_ack_file(file_key): f"All values OK: {all_ok}" ) return True + + +def delete_filename_from_audit_table(filename) -> bool: + + # 1. Query the GSI to get all items with the given filename + try: + response = audit_table.query( + IndexName="filename_index", + KeyConditionExpression=Key("filename").eq(filename) + ) + items = response.get("Items", []) + + # 2. Delete each item by primary key (message_id) + for item in items: + audit_table.delete_item(Key={"message_id": item["message_id"]}) + return True + except Exception as e: + logger.error(f"Error deleting from audit table: {e}") + return False + + +def delete_filename_from_events_table(identifier) -> bool: + + # 1. Query the GSI to get all items with the given filename + try: + identifier_pk = f"{RAVS_URI}#{identifier}" + response = events_table.query( + IndexName="IdentifierGSI", + KeyConditionExpression=Key("IdentifierPK").eq(identifier_pk) + ) + items = response.get("Items", []) + + # 2. Delete each item by primary key (PK) + for item in items: + events_table.delete_item(Key={"PK": item["PK"]}) + return True + except Exception as e: + logger.warning(f"Error deleting from events table: {e}") + return False + + +def poll_s3_file_pattern(prefix, search_pattern): + """Poll the ACK_BUCKET for an ack file that contains the input_file_name as a substring.""" + + response = s3_client.list_objects_v2(Bucket=ACK_BUCKET, Prefix=prefix) + if "Contents" in response: + for obj in response["Contents"]: + key = obj["Key"] + if search_pattern in key: + return key + return None + + +def aws_cleanup(key, identifier, ack_keys): + if key: + archive_file = f"archive/{key}" + if not delete_file_from_s3(SOURCE_BUCKET, archive_file): + logger.warning(f"S3 delete fail {SOURCE_BUCKET}: {archive_file}") + delete_filename_from_audit_table(key) + delete_filename_from_events_table(identifier) + for ack_key in ack_keys.values(): + if ack_key: + if not delete_file_from_s3(ACK_BUCKET, ack_key): + logger.warning(f"s3 delete fail {ACK_BUCKET}: {ack_key}") + + +def purge_sqs_queues() -> bool: + try: + # only purge if ENVIRONMENT=pr-* to avoid purging shared queues + if environment.startswith("pr-"): + sqs_client.purge_queue(QueueUrl=batch_fifo_queue_url) + sqs_client.purge_queue(QueueUrl=ack_metadata_queue_url) + return True + except sqs_client.exceptions.PurgeQueueInProgress: + logger.error("SQS purge already in progress. 
Try again later.") + except Exception as e: + logger.error(f"SQS Purge error: {e}") + return False + + +def create_row(unique_id, dose_amount, action_flag: str, header, inject_cp1252=None): + """Helper function to create a single row with the specified UNIQUE_ID and ACTION_FLAG.""" + + name = "James" if not inject_cp1252 else b'Jam\xe9s' + return { + header: "9732928395", + "PERSON_FORENAME": "PHYLIS", + "PERSON_SURNAME": name, + "PERSON_DOB": "20080217", + "PERSON_GENDER_CODE": "0", + "PERSON_POSTCODE": "WD25 0DZ", + "DATE_AND_TIME": datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S"), + "SITE_CODE": "RVVKC", + "SITE_CODE_TYPE_URI": "https://fhir.nhs.uk/Id/ods-organization-code", + "UNIQUE_ID": unique_id, + "UNIQUE_ID_URI": RAVS_URI, + "ACTION_FLAG": action_flag, + "PERFORMING_PROFESSIONAL_FORENAME": "PHYLIS", + "PERFORMING_PROFESSIONAL_SURNAME": name, + "RECORDED_DATE": datetime.now(timezone.utc).strftime("%Y%m%d"), + "PRIMARY_SOURCE": "TRUE", + "VACCINATION_PROCEDURE_CODE": "956951000000104", + "VACCINATION_PROCEDURE_TERM": "RSV vaccination in pregnancy (procedure)", + "DOSE_SEQUENCE": "1", + "VACCINE_PRODUCT_CODE": "42223111000001107", + "VACCINE_PRODUCT_TERM": "Quadrivalent influenza vaccine (Sanofi Pasteur)", + "VACCINE_MANUFACTURER": "Sanofi Pasteur", + "BATCH_NUMBER": "BN92478105653", + "EXPIRY_DATE": "20240915", + "SITE_OF_VACCINATION_CODE": "368209003", + "SITE_OF_VACCINATION_TERM": "Right arm", + "ROUTE_OF_VACCINATION_CODE": "1210999013", + "ROUTE_OF_VACCINATION_TERM": "Intradermal use", + "DOSE_AMOUNT": dose_amount, + "DOSE_UNIT_CODE": "2622896019", + "DOSE_UNIT_TERM": "Inhalation - unit of product usage", + "INDICATION_CODE": "1037351000000105", + "LOCATION_CODE": "RJC02", + "LOCATION_CODE_TYPE_URI": "https://fhir.nhs.uk/Id/ods-organization-code", + } diff --git a/e2e_batch/vax_suppliers.py b/e2e_batch/vax_suppliers.py new file mode 100644 index 000000000..a4d85357c --- /dev/null +++ b/e2e_batch/vax_suppliers.py @@ -0,0 +1,154 @@ +# json to represent the classes below +suppliers = { + "DPSFULL": { + "DPSFULL": { + "3IN1": "CRUDS", + "COVID19": "CRUDS", + "FLU": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + } + }, + "DPSREDUCED": { + "DPSREDUCED": { + "3IN1": "CRUDS", + "COVID19": "CRUDS", + "FLU": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + } + }, + "MAVIS": { + "V0V8L": { + "3IN1": "CRUDS", + "FLU": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS" + } + }, + "SONAR": { + "8HK48": { + "FLU": "CD" + } + }, + "EVA": { + "8HA94": { + "COVID19": "CUD" + } + }, + "RAVS": { + "X26": { + "MMR": "CRUDS", + "RSV": "CRUDS" + }, + "X8E5B": { + "MMR": "CRUDS", + "RSV": "CRUDS" + } + }, + "EMIS": { + "YGM41": { + "3IN1": "CRUDS", + "COVID19": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + }, + "YGJ": { + "3IN1": "CRUDS", + "COVID19": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + } + }, + "TPP": { + "YGA": { + "3IN1": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + } + }, + "MEDICUS": { + "YGMYW": { + "3IN1": "CRUDS", + "HPV": "CRUDS", + "MENACWY": "CRUDS", + "MMR": "CRUDS", + "RSV": "CRUDS" + } + } +} + + +class OdsVax: + def __init__(self, ods_code: str, vax: str): + self.ods_code = ods_code + self.vax = vax + + +class TestPair: + """ + "ods_vax": TestPair.E8HA94_COVID19_CUD, + "ods_vax": TestPair.DPSFULL_COVID19_CRUDS, + "ods_vax": TestPair.V0V8L_FLU_CRUDS, + "ods_vax": 
TestPair.V0V8L_3IN1_CRUDS, + "ods_vax": TestPair.X26_MMR_CRUDS, + "ods_vax": TestPair.YGA_MENACWY_CRUDS, + """ + X26_MMR_CRUDS = OdsVax("X26", "MMR") + # X26_RSV_CRUDS = OdsVax("X26", "RSV") + # X8E5B_MMR_CRUDS = OdsVax("X8E5B", "MMR") + # X8E5B_RSV_CRUDS = OdsVax("X8E5B", "RSV") + # YGM41_3IN1_CRUDS = OdsVax("YGM41", "3IN1") + # YGM41_COVID19_CRUDS = OdsVax("YGM41", "COVID19") + # YGM41_HPV_CRUDS = OdsVax("YGM41", "HPV") + # YGM41_MENACWY_CRUDS = OdsVax("YGM41", "MENACWY") + # YGM41_MMR_CRUDS = OdsVax("YGM41", "MMR") + # YGM41_RSV_CRUDS = OdsVax("YGM41", "RSV") + # YGJ_3IN1_CRUDS = OdsVax("YGJ", "3IN1") + # YGJ_COVID19_CRUDS = OdsVax("YGJ", "COVID19") + # YGJ_HPV_CRUDS = OdsVax("YGJ", "HPV") + # YGJ_MENACWY_CRUDS = OdsVax("YGJ", "MENACWY") + # YGJ_MMR_CRUDS = OdsVax("YGJ", "MMR") + # YGJ_RSV_CRUDS = OdsVax("YGJ", "RSV") + # DPSFULL_3IN1_CRUDS = OdsVax("DPSFULL", "3IN1") + DPSFULL_COVID19_CRUDS = OdsVax("DPSFULL", "COVID19") + # DPSFULL_FLU_CRUDS = OdsVax("DPSFULL", "FLU") + # DPSFULL_HPV_CRUDS = OdsVax("DPSFULL", "HPV") + # DPSFULL_MENACWY_CRUDS = OdsVax("DPSFULL", "MENACWY") + # DPSFULL_MMR_CRUDS = OdsVax("DPSFULL", "MMR") + # DPSFULL_RSV_CRUDS = OdsVax("DPSFULL", "RSV") + # DPSREDUCED_3IN1_CRUDS = OdsVax("DPSREDUCED", "3IN1") + # DPSREDUCED_COVID19_CRUDS = OdsVax("DPSREDUCED", "COVID19") + # DPSREDUCED_FLU_CRUDS = OdsVax("DPSREDUCED", "FLU") + # DPSREDUCED_HPV_CRUDS = OdsVax("DPSREDUCED", "HPV") + # DPSREDUCED_MENACWY_CRUDS = OdsVax("DPSREDUCED", "MENACWY") + # DPSREDUCED_MMR_CRUDS = OdsVax("DPSREDUCED", "MMR") + # DPSREDUCED_RSV_CRUDS = OdsVax("DPSREDUCED", "RSV") + V0V8L_3IN1_CRUDS = OdsVax("V0V8L", "3IN1") + V0V8L_FLU_CRUDS = OdsVax("V0V8L", "FLU") + # V0V8L_HPV_CRUDS = OdsVax("V0V8L", "HPV") + # V0V8L_MENACWY_CRUDS = OdsVax("V0V8L", "MENACWY") + # V0V8L_MMR_CRUDS = OdsVax("V0V8L", "MMR") + # YGA_3IN1_CRUDS = OdsVax("YGA", "3IN1") + # YGA_HPV_CRUDS = OdsVax("YGA", "HPV") + YGA_MENACWY_CRUDS = OdsVax("YGA", "MENACWY") + # YGA_MMR_CRUDS = OdsVax("YGA", "MMR") + # YGA_RSV_CRUDS = OdsVax("YGA", "RSV") + # YGMYW_3IN1_CRUDS = OdsVax("YGMYW", "3IN1") + # YGMYW_HPV_CRUDS = OdsVax("YGMYW", "HPV") + # YGMYW_MENACWY_CRUDS = OdsVax("YGMYW", "MENACWY") + # YGMYW_MMR_CRUDS = OdsVax("YGMYW", "MMR") + # YGMYW_RSV_CRUDS = OdsVax("YGMYW", "RSV") + # E8HK48_FLU_CD = OdsVax("8HK48", "FLU") + E8HA94_COVID19_CUD = OdsVax("8HA94", "COVID19")
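The `suppliers` mapping added in `vax_suppliers.py` nests supplier → ODS code → vaccine type → CRUDS permission string. A small helper such as the one below (not part of this change set; the function name is illustrative) shows how that structure could be queried when deciding which supplier/vaccine pairings to enable:

```
from vax_suppliers import suppliers


def supplier_allows(supplier: str, ods_code: str, vax: str, operation: str) -> bool:
    """Return True if the CRUDS string for supplier/ods_code/vax contains `operation`."""
    permissions = suppliers.get(supplier, {}).get(ods_code, {}).get(vax, "")
    return operation in permissions


# RAVS (X26) holds full CRUDS permissions for RSV, so create ("C") is allowed.
assert supplier_allows("RAVS", "X26", "RSV", "C")
# SONAR (8HK48) only holds "CD" for FLU, so update ("U") is not.
assert not supplier_allows("SONAR", "8HK48", "FLU", "U")
```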
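The reworked `create_row` returns a flat dict keyed by batch column name, with an optional cp1252 byte injection for the name fields. Below is a minimal sketch of turning that dict into a one-row batch file; the `utils` module path, the `NHS_NUMBER` header, the `new` action flag, the `|` delimiter and the output file name are assumptions rather than values taken from this change:

```
import csv
import uuid

from utils import create_row  # module name assumed

unique_id = str(uuid.uuid4())
row = create_row(unique_id, dose_amount="0.5", action_flag="new", header="NHS_NUMBER")

# Write a single-row batch file using the dict's column order.
with open("RSV_Vaccinations_test.csv", "w", newline="") as handle:
    writer = csv.DictWriter(handle, fieldnames=list(row.keys()), delimiter="|")
    writer.writeheader()
    writer.writerow(row)
```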
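`aws_cleanup` and `purge_sqs_queues` together remove the archived input file, the audit-table and events-table records, and any ack files, and purge the queue backlog only when `ENVIRONMENT` starts with `pr-`. A sketch of calling them from a `unittest` tearDown is shown below; the attribute names and the `utils` module path are assumptions, not part of the diff:

```
import unittest

from utils import aws_cleanup, purge_sqs_queues  # module name assumed


class BatchSubmissionCleanup(unittest.TestCase):
    def setUp(self):
        self.key = None          # input file key, set once the batch file is uploaded
        self.identifier = None   # UNIQUE_ID used in the submitted row
        self.ack_keys = {}       # ack file keys discovered while polling, keyed by stage

    def tearDown(self):
        # Remove the archived input, audit/events records and ack files, then
        # purge the PR-scoped SQS queues (skipped for shared environments).
        aws_cleanup(self.key, self.identifier, self.ack_keys)
        purge_sqs_queues()
```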