diff --git a/backend/src/authorisation/authoriser.py b/backend/src/authorisation/authoriser.py index ac5e48026..d6f406bdc 100644 --- a/backend/src/authorisation/authoriser.py +++ b/backend/src/authorisation/authoriser.py @@ -3,7 +3,7 @@ import json from authorisation.api_operation_code import ApiOperationCode -from clients import redis_client, logger +from clients import logger, redis_client from constants import SUPPLIER_PERMISSIONS_HASH_KEY diff --git a/backend/src/controller/aws_apig_event_utils.py b/backend/src/controller/aws_apig_event_utils.py index 87b6bb05e..93d7c93ac 100644 --- a/backend/src/controller/aws_apig_event_utils.py +++ b/backend/src/controller/aws_apig_event_utils.py @@ -3,7 +3,6 @@ from typing import Optional from aws_lambda_typing.events import APIGatewayProxyEventV1 - from controller.constants import SUPPLIER_SYSTEM_HEADER_NAME from models.errors import UnauthorizedError from utils import dict_utils diff --git a/backend/src/controller/fhir_api_exception_handler.py b/backend/src/controller/fhir_api_exception_handler.py index 3ec89d928..ddda753f0 100644 --- a/backend/src/controller/fhir_api_exception_handler.py +++ b/backend/src/controller/fhir_api_exception_handler.py @@ -8,12 +8,12 @@ from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE from controller.aws_apig_response_utils import create_response from models.errors import ( - UnauthorizedVaxError, - UnauthorizedError, + Code, ResourceNotFoundError, - create_operation_outcome, Severity, - Code, + UnauthorizedError, + UnauthorizedVaxError, + create_operation_outcome, ) _CUSTOM_EXCEPTION_TO_STATUS_MAP: dict[Type[Exception], int] = { diff --git a/backend/src/controller/fhir_controller.py b/backend/src/controller/fhir_controller.py index 319d01f44..2ccdaf4fc 100644 --- a/backend/src/controller/fhir_controller.py +++ b/backend/src/controller/fhir_controller.py @@ -8,28 +8,27 @@ from typing import Optional from aws_lambda_typing.events import APIGatewayProxyEventV1 - from controller.aws_apig_event_utils import ( - get_supplier_system_header, get_path_parameter, + get_supplier_system_header, ) from controller.aws_apig_response_utils import create_response from controller.constants import E_TAG_HEADER_NAME from controller.fhir_api_exception_handler import fhir_api_exception_handler from models.errors import ( - Severity, Code, - create_operation_outcome, - UnauthorizedError, - ResourceNotFoundError, - UnhandledResponseError, - ValidationError, IdentifierDuplicationError, ParameterException, + ResourceNotFoundError, + Severity, + UnauthorizedError, UnauthorizedVaxError, + UnhandledResponseError, + ValidationError, + create_operation_outcome, ) from models.utils.generic_utils import check_keys_in_sources -from parameter_parser import process_params, process_search_params, create_query_string +from parameter_parser import create_query_string, process_params, process_search_params from repository.fhir_repository import ImmunizationRepository, create_table from service.fhir_service import FhirService, UpdateOutcome, get_service_url diff --git a/backend/src/create_imms_handler.py b/backend/src/create_imms_handler.py index f61832123..5eee06358 100644 --- a/backend/src/create_imms_handler.py +++ b/backend/src/create_imms_handler.py @@ -8,7 +8,7 @@ from controller.fhir_controller import FhirController, make_controller from local_lambda import load_string from log_structure import function_info -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, 
create_operation_outcome logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/backend/src/delete_imms_handler.py b/backend/src/delete_imms_handler.py index e621050d3..f32ba79db 100644 --- a/backend/src/delete_imms_handler.py +++ b/backend/src/delete_imms_handler.py @@ -7,7 +7,7 @@ from controller.aws_apig_response_utils import create_response from controller.fhir_controller import FhirController, make_controller from log_structure import function_info -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/backend/src/filter.py b/backend/src/filter.py index 8aa3f89d0..61cd8a198 100644 --- a/backend/src/filter.py +++ b/backend/src/filter.py @@ -2,9 +2,9 @@ from constants import Urls from models.utils.generic_utils import ( - is_actor_referencing_contained_resource, - get_contained_practitioner, get_contained_patient, + get_contained_practitioner, + is_actor_referencing_contained_resource, ) @@ -63,7 +63,6 @@ def replace_organization_values(imms: dict) -> dict: """ for performer in imms.get("performer", [{}]): if performer.get("actor", {}).get("type") == "Organization": - # Obfuscate or set the identifier value and system. identifier = performer["actor"].get("identifier", {}) if identifier.get("value") is not None: diff --git a/backend/src/forwarding_batch_lambda.py b/backend/src/forwarding_batch_lambda.py index 74cad1fbe..890a6f8cc 100644 --- a/backend/src/forwarding_batch_lambda.py +++ b/backend/src/forwarding_batch_lambda.py @@ -7,7 +7,6 @@ from datetime import datetime import simplejson as json - from batch.batch_filename_to_events_mapper import BatchFilenameToEventsMapper from clients import sqs_client from controller.fhir_batch_controller import ( @@ -15,12 +14,12 @@ make_batch_controller, ) from models.errors import ( - MessageNotSuccessfulError, - RecordProcessorError, CustomValidationError, IdentifierDuplicationError, - ResourceNotFoundError, + MessageNotSuccessfulError, + RecordProcessorError, ResourceFoundError, + ResourceNotFoundError, ) from repository.fhir_batch_repository import create_table diff --git a/backend/src/models/fhir_immunization.py b/backend/src/models/fhir_immunization.py index 1f6e61867..ccb4f9852 100644 --- a/backend/src/models/fhir_immunization.py +++ b/backend/src/models/fhir_immunization.py @@ -1,7 +1,6 @@ """Immunization FHIR R4B validator""" from fhir.resources.R4B.immunization import Immunization - from models.fhir_immunization_post_validators import PostValidators from models.fhir_immunization_pre_validators import PreValidators from models.utils.validation_utils import get_vaccine_type diff --git a/backend/src/models/fhir_immunization_post_validators.py b/backend/src/models/fhir_immunization_post_validators.py index f4688de1d..f38aea1e2 100644 --- a/backend/src/models/fhir_immunization_post_validators.py +++ b/backend/src/models/fhir_immunization_post_validators.py @@ -4,7 +4,7 @@ from models.field_locations import FieldLocations from models.field_names import FieldNames from models.mandation_functions import MandationFunctions -from models.utils.base_utils import obtain_field_value, obtain_field_location +from models.utils.base_utils import obtain_field_location, obtain_field_value from models.validation_sets import ValidationSets diff --git a/backend/src/models/fhir_immunization_pre_validators.py b/backend/src/models/fhir_immunization_pre_validators.py index 
94e4435e4..d40d977ac 100644 --- a/backend/src/models/fhir_immunization_pre_validators.py +++ b/backend/src/models/fhir_immunization_pre_validators.py @@ -4,14 +4,14 @@ from models.constants import Constants from models.errors import MandatoryError from models.utils.generic_utils import ( - get_generic_extension_value, - generate_field_location_for_extension, check_for_unknown_elements, - patient_name_given_field_location, + generate_field_location_for_extension, + get_generic_extension_value, + patient_and_practitioner_value_and_index, patient_name_family_field_location, - practitioner_name_given_field_location, + patient_name_given_field_location, practitioner_name_family_field_location, - patient_and_practitioner_value_and_index, + practitioner_name_given_field_location, ) from models.utils.pre_validator_utils import PreValidation diff --git a/backend/src/models/field_locations.py b/backend/src/models/field_locations.py index 9c7ef1e41..031fabd37 100644 --- a/backend/src/models/field_locations.py +++ b/backend/src/models/field_locations.py @@ -8,10 +8,10 @@ from constants import Urls from models.utils.generic_utils import ( generate_field_location_for_extension, - patient_name_given_field_location, patient_name_family_field_location, - practitioner_name_given_field_location, + patient_name_given_field_location, practitioner_name_family_field_location, + practitioner_name_given_field_location, ) diff --git a/backend/src/models/obtain_field_value.py b/backend/src/models/obtain_field_value.py index 3258c4bcf..80e33c7d3 100644 --- a/backend/src/models/obtain_field_value.py +++ b/backend/src/models/obtain_field_value.py @@ -4,8 +4,8 @@ from models.utils.generic_utils import ( get_contained_patient, get_contained_practitioner, - is_organization, get_generic_extension_value, + is_organization, patient_and_practitioner_value_and_index, ) diff --git a/backend/src/models/utils/base_utils.py b/backend/src/models/utils/base_utils.py index 7314fa434..dc523c155 100644 --- a/backend/src/models/utils/base_utils.py +++ b/backend/src/models/utils/base_utils.py @@ -1,10 +1,8 @@ """Utils for backend src code""" from models.field_locations import FieldLocations - from models.obtain_field_value import ObtainFieldValue - FIELD_LOCATIONS = FieldLocations() diff --git a/backend/src/models/utils/generic_utils.py b/backend/src/models/utils/generic_utils.py index 1c91de47c..891d0d158 100644 --- a/backend/src/models/utils/generic_utils.py +++ b/backend/src/models/utils/generic_utils.py @@ -4,18 +4,19 @@ import datetime import json import urllib.parse -from typing import Literal, Union, Optional, Dict, Any +from typing import Any, Dict, Literal, Optional, Union from fhir.resources.R4B.bundle import ( Bundle as FhirBundle, +) +from fhir.resources.R4B.bundle import ( BundleEntry, - BundleLink, BundleEntrySearch, + BundleLink, ) from fhir.resources.R4B.immunization import Immunization -from stdnum.verhoeff import validate - from models.constants import Constants +from stdnum.verhoeff import validate def get_contained_resource(imms: dict, resource: Literal["Patient", "Practitioner", "QuestionnaireResponse"]): diff --git a/backend/src/models/utils/pre_validator_utils.py b/backend/src/models/utils/pre_validator_utils.py index 2ac164d47..793d86f92 100644 --- a/backend/src/models/utils/pre_validator_utils.py +++ b/backend/src/models/utils/pre_validator_utils.py @@ -1,12 +1,11 @@ -from datetime import datetime, date +from datetime import date, datetime from decimal import Decimal from typing import Union -from .generic_utils 
import nhs_number_mod11_check, is_valid_simple_snomed +from .generic_utils import is_valid_simple_snomed, nhs_number_mod11_check class PreValidation: - @staticmethod def for_string( field_value: str, diff --git a/backend/src/models/utils/validation_utils.py b/backend/src/models/utils/validation_utils.py index 412332e92..990821aa5 100644 --- a/backend/src/models/utils/validation_utils.py +++ b/backend/src/models/utils/validation_utils.py @@ -9,6 +9,7 @@ from models.field_names import FieldNames from models.obtain_field_value import ObtainFieldValue from models.utils.base_utils import obtain_field_location + from .generic_utils import create_diagnostics_error @@ -27,7 +28,6 @@ def get_target_disease_codes(immunization: dict): # For each item in the target disease list, extract the snomed code for i, element in enumerate(target_disease): - try: code = [x["code"] for x in element["coding"] if x.get("system") == Urls.snomed][0] except (KeyError, IndexError) as error: diff --git a/backend/src/not_found_handler.py b/backend/src/not_found_handler.py index 8acf38ab6..2f0d21fb4 100644 --- a/backend/src/not_found_handler.py +++ b/backend/src/not_found_handler.py @@ -11,7 +11,6 @@ def not_found_handler(event, context): def not_found(event, _context): - if event.get("httpMethod") not in ALLOWED_METHODS: response = { "statusCode": 405, diff --git a/backend/src/parameter_parser.py b/backend/src/parameter_parser.py index 21d996781..f72b1c0ad 100644 --- a/backend/src/parameter_parser.py +++ b/backend/src/parameter_parser.py @@ -2,10 +2,9 @@ import datetime from dataclasses import dataclass from typing import Optional -from urllib.parse import parse_qs, urlencode, quote +from urllib.parse import parse_qs, quote, urlencode from aws_lambda_typing.events import APIGatewayProxyEventV1 - from clients import redis_client from models.constants import Constants from models.errors import ParameterException diff --git a/backend/src/repository/fhir_batch_repository.py b/backend/src/repository/fhir_batch_repository.py index 7e8ad4c53..24e2fd612 100644 --- a/backend/src/repository/fhir_batch_repository.py +++ b/backend/src/repository/fhir_batch_repository.py @@ -6,14 +6,13 @@ import boto3 import botocore.exceptions import simplejson as json -from boto3.dynamodb.conditions import Key, Attr - +from boto3.dynamodb.conditions import Attr, Key from clients import logger from models.errors import ( - UnhandledResponseError, IdentifierDuplicationError, - ResourceNotFoundError, ResourceFoundError, + ResourceNotFoundError, + UnhandledResponseError, ) @@ -94,7 +93,6 @@ def __init__(self, imms: dict, vax_type: str, supplier: str, version: int): class ImmunizationBatchRepository: - def create_immunization( self, immunization: any, diff --git a/backend/src/repository/fhir_repository.py b/backend/src/repository/fhir_repository.py index efc133c8f..ed8cbd834 100644 --- a/backend/src/repository/fhir_repository.py +++ b/backend/src/repository/fhir_repository.py @@ -9,18 +9,17 @@ import simplejson as json from boto3.dynamodb.conditions import Attr, Key from botocore.config import Config -from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource, Table -from responses import logger - from models.errors import ( + IdentifierDuplicationError, ResourceNotFoundError, UnhandledResponseError, - IdentifierDuplicationError, ) from models.utils.validation_utils import ( - get_vaccine_type, check_identifier_system_value, + get_vaccine_type, ) +from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource, Table +from 
responses import logger def create_table(table_name=None, endpoint_url=None, region_name="eu-west-2"): diff --git a/backend/src/search_imms_handler.py b/backend/src/search_imms_handler.py index 7254566a1..f8f275ea7 100644 --- a/backend/src/search_imms_handler.py +++ b/backend/src/search_imms_handler.py @@ -6,13 +6,13 @@ import urllib.parse import uuid -from aws_lambda_typing import context as context_, events - +from aws_lambda_typing import context as context_ +from aws_lambda_typing import events from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, MAX_RESPONSE_SIZE_BYTES from controller.aws_apig_response_utils import create_response from controller.fhir_controller import FhirController, make_controller from log_structure import function_info -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/backend/src/service/fhir_batch_service.py b/backend/src/service/fhir_batch_service.py index 85e39a1e8..48c200840 100644 --- a/backend/src/service/fhir_batch_service.py +++ b/backend/src/service/fhir_batch_service.py @@ -1,9 +1,6 @@ -from pydantic import ValidationError - -from models.errors import CustomValidationError -from models.errors import MandatoryError +from models.errors import CustomValidationError, MandatoryError from models.fhir_immunization import ImmunizationValidator - +from pydantic import ValidationError from repository.fhir_batch_repository import ImmunizationBatchRepository IMMUNIZATION_VALIDATOR = ImmunizationValidator() diff --git a/backend/src/service/fhir_service.py b/backend/src/service/fhir_service.py index 45616521c..bc9000278 100644 --- a/backend/src/service/fhir_service.py +++ b/backend/src/service/fhir_service.py @@ -1,42 +1,39 @@ import datetime import logging import os - - from enum import Enum from typing import Optional, Union from uuid import uuid4 +import parameter_parser +from authorisation.api_operation_code import ApiOperationCode +from authorisation.authoriser import Authoriser from fhir.resources.R4B.bundle import ( Bundle as FhirBundle, +) +from fhir.resources.R4B.bundle import ( BundleEntry, - BundleLink, BundleEntrySearch, + BundleLink, ) from fhir.resources.R4B.immunization import Immunization -from pydantic import ValidationError - -import parameter_parser -from authorisation.api_operation_code import ApiOperationCode -from authorisation.authoriser import Authoriser - from filter import Filter from models.errors import ( - InvalidPatientId, CustomValidationError, - UnauthorizedVaxError, + InvalidPatientId, + MandatoryError, ResourceNotFoundError, + UnauthorizedVaxError, ) -from models.errors import MandatoryError from models.fhir_immunization import ImmunizationValidator - from models.utils.generic_utils import ( - nhs_number_mod11_check, - get_occurrence_datetime, form_json, get_contained_patient, + get_occurrence_datetime, + nhs_number_mod11_check, ) from models.utils.validation_utils import get_vaccine_type +from pydantic import ValidationError from repository.fhir_repository import ImmunizationRepository from timer import timed @@ -54,7 +51,6 @@ def get_service_url( service_env: str = IMMUNIZATION_ENV, service_base_path: str = IMMUNIZATION_BASE_PATH, ) -> str: - if not service_base_path: service_base_path = "immunisation-fhir-api/FHIR/R4" diff --git a/backend/src/timer.py b/backend/src/timer.py index c78fda30d..777246c79 100644 --- a/backend/src/timer.py +++ b/backend/src/timer.py 
@@ -1,6 +1,5 @@ import logging import time - from functools import wraps logging.basicConfig() diff --git a/backend/src/update_imms_handler.py b/backend/src/update_imms_handler.py index cee122a55..7cd441185 100644 --- a/backend/src/update_imms_handler.py +++ b/backend/src/update_imms_handler.py @@ -8,7 +8,7 @@ from controller.fhir_controller import FhirController, make_controller from local_lambda import load_string from log_structure import function_info -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/backend/src/utils/dict_utils.py b/backend/src/utils/dict_utils.py index 1bcff54ac..a8e0cf3a3 100644 --- a/backend/src/utils/dict_utils.py +++ b/backend/src/utils/dict_utils.py @@ -1,6 +1,6 @@ """Generic helper module for Python dictionary utility functions""" -from typing import Optional, Any +from typing import Any, Optional def get_field(target_dict: dict, *args: str, default: Optional[Any] = None) -> Any: diff --git a/backend/tests/controller/test_fhir_api_exception_handler.py b/backend/tests/controller/test_fhir_api_exception_handler.py index bcc6d4792..668bec628 100644 --- a/backend/tests/controller/test_fhir_api_exception_handler.py +++ b/backend/tests/controller/test_fhir_api_exception_handler.py @@ -3,7 +3,7 @@ from unittest.mock import patch from controller.fhir_api_exception_handler import fhir_api_exception_handler -from models.errors import UnauthorizedError, UnauthorizedVaxError, ResourceNotFoundError +from models.errors import ResourceNotFoundError, UnauthorizedError, UnauthorizedVaxError class TestFhirApiExceptionHandler(unittest.TestCase): diff --git a/backend/tests/controller/test_fhir_batch_controller.py b/backend/tests/controller/test_fhir_batch_controller.py index b06bc9164..44a230513 100644 --- a/backend/tests/controller/test_fhir_batch_controller.py +++ b/backend/tests/controller/test_fhir_batch_controller.py @@ -4,10 +4,10 @@ from controller.fhir_batch_controller import ImmunizationBatchController from models.errors import ( - ResourceNotFoundError, - UnhandledResponseError, CustomValidationError, IdentifierDuplicationError, + ResourceNotFoundError, + UnhandledResponseError, ) from repository.fhir_batch_repository import ImmunizationBatchRepository from service.fhir_batch_service import ImmunizationBatchService @@ -15,7 +15,6 @@ class TestCreateImmunizationBatchController(unittest.TestCase): - def setUp(self): self.mock_repo = create_autospec(ImmunizationBatchRepository) self.mock_service = create_autospec(ImmunizationBatchService) @@ -132,7 +131,6 @@ def test_send_request_to_dynamo_create_unhandled_error(self): class TestUpdateImmunizationBatchController(unittest.TestCase): - def setUp(self): self.mock_repo = create_autospec(ImmunizationBatchRepository) self.mock_service = create_autospec(ImmunizationBatchService) @@ -248,7 +246,6 @@ def test_send_request_to_dynamo_update_unhandled_error(self): class TestDeleteImmunizationBatchController(unittest.TestCase): - def setUp(self): self.mock_repo = create_autospec(ImmunizationBatchRepository) self.mock_service = create_autospec(ImmunizationBatchService) diff --git a/backend/tests/controller/test_fhir_controller.py b/backend/tests/controller/test_fhir_controller.py index 8c298faa2..8a7d2ddf1 100644 --- a/backend/tests/controller/test_fhir_controller.py +++ b/backend/tests/controller/test_fhir_controller.py @@ -4,22 +4,21 @@ import urllib import urllib.parse import 
uuid -from unittest.mock import create_autospec, ANY, patch, Mock +from unittest.mock import ANY, Mock, create_autospec, patch from urllib.parse import urlencode -from fhir.resources.R4B.bundle import Bundle -from fhir.resources.R4B.immunization import Immunization - from controller.aws_apig_response_utils import create_response from controller.fhir_controller import FhirController +from fhir.resources.R4B.bundle import Bundle +from fhir.resources.R4B.immunization import Immunization from models.errors import ( - ResourceNotFoundError, - UnhandledResponseError, - InvalidPatientId, CustomValidationError, + IdentifierDuplicationError, + InvalidPatientId, ParameterException, + ResourceNotFoundError, UnauthorizedVaxError, - IdentifierDuplicationError, + UnhandledResponseError, ) from parameter_parser import patient_identifier_system, process_search_params from repository.fhir_repository import ImmunizationRepository diff --git a/backend/tests/models/utils/test_generic_utils.py b/backend/tests/models/utils/test_generic_utils.py index f02d09f41..86c4014dd 100644 --- a/backend/tests/models/utils/test_generic_utils.py +++ b/backend/tests/models/utils/test_generic_utils.py @@ -1,11 +1,10 @@ """Generic utils for tests""" import unittest -from datetime import datetime, date +from datetime import date, datetime from src.models.utils.generic_utils import form_json - -from testing_utils.generic_utils import load_json_data, format_date_types +from testing_utils.generic_utils import format_date_types, load_json_data class TestFormJson(unittest.TestCase): diff --git a/backend/tests/repository/test_fhir_batch_repository.py b/backend/tests/repository/test_fhir_batch_repository.py index ee7ff89f8..3f84ae51b 100644 --- a/backend/tests/repository/test_fhir_batch_repository.py +++ b/backend/tests/repository/test_fhir_batch_repository.py @@ -1,19 +1,18 @@ import os import unittest -from unittest.mock import MagicMock, ANY, patch +from unittest.mock import ANY, MagicMock, patch from uuid import uuid4 import boto3 import botocore.exceptions import simplejson as json -from moto import mock_aws - from models.errors import ( IdentifierDuplicationError, + ResourceFoundError, ResourceNotFoundError, UnhandledResponseError, - ResourceFoundError, ) +from moto import mock_aws from repository.fhir_batch_repository import ImmunizationBatchRepository, create_table from testing_utils.immunization_utils import create_covid_19_immunization_dict @@ -26,7 +25,6 @@ def _make_immunization_pk(_id): @mock_aws class TestImmunizationBatchRepository(unittest.TestCase): - def setUp(self): os.environ["DYNAMODB_TABLE_NAME"] = "test-immunization-table" self.dynamodb = boto3.resource("dynamodb", region_name="eu-west-2") @@ -47,7 +45,6 @@ def tearDown(self): class TestCreateImmunization(TestImmunizationBatchRepository): - def modify_immunization(self, remove_nhs): """Modify the immunization object by removing NHS number if required""" if remove_nhs: @@ -77,7 +74,7 @@ def create_immunization_test_logic(self, is_present, remove_nhs): }, ConditionExpression=ANY, ) - self.assertEqual(item["PK"], f'Immunization#{self.immunization["id"]}') + self.assertEqual(item["PK"], f"Immunization#{self.immunization['id']}") def test_create_immunization_with_nhs_number(self): """Test creating Immunization with NHS number.""" @@ -219,7 +216,7 @@ def test_update_immunization(self): ReturnValues=ANY, ConditionExpression=ANY, ) - self.assertEqual(response, f'Immunization#{self.immunization["id"]}') + self.assertEqual(response, 
f"Immunization#{self.immunization['id']}") def test_update_immunization_not_found(self): """it should not update Immunization since the imms id not found""" @@ -332,7 +329,7 @@ def test_delete_immunization(self): ReturnValues=ANY, ConditionExpression=ANY, ) - self.assertEqual(response, f'Immunization#{self.immunization ["id"]}') + self.assertEqual(response, f"Immunization#{self.immunization['id']}") def test_delete_immunization_not_found(self): """it should not delete Immunization since the imms id not found""" @@ -417,7 +414,6 @@ def test_delete_immunization_conditionalcheckfailedexception_error(self): @mock_aws @patch.dict(os.environ, {"DYNAMODB_TABLE_NAME": "TestTable"}) class TestCreateTable(TestImmunizationBatchRepository): - def test_create_table_success(self): """Test if create_table returns a DynamoDB Table instance with the correct name""" diff --git a/backend/tests/repository/test_fhir_repository.py b/backend/tests/repository/test_fhir_repository.py index a09341892..7cd39b30e 100644 --- a/backend/tests/repository/test_fhir_repository.py +++ b/backend/tests/repository/test_fhir_repository.py @@ -1,16 +1,15 @@ import time import unittest import uuid -from unittest.mock import MagicMock, patch, ANY +from unittest.mock import ANY, MagicMock, patch import botocore.exceptions import simplejson as json from boto3.dynamodb.conditions import Attr, Key - from models.errors import ( + IdentifierDuplicationError, ResourceNotFoundError, UnhandledResponseError, - IdentifierDuplicationError, ) from models.utils.validation_utils import get_vaccine_type from repository.fhir_repository import ImmunizationRepository diff --git a/backend/tests/service/test_fhir_batch_service.py b/backend/tests/service/test_fhir_batch_service.py index 2f4855280..c1e618e74 100644 --- a/backend/tests/service/test_fhir_batch_service.py +++ b/backend/tests/service/test_fhir_batch_service.py @@ -27,7 +27,6 @@ def tearDown(self): class TestCreateImmunizationBatchService(TestFhirBatchServiceBase): - def setUp(self): super().setUp() self.mock_repo = create_autospec(ImmunizationBatchRepository) @@ -98,7 +97,6 @@ def test_create_immunization_post_validation_error(self): class TestUpdateImmunizationBatchService(TestFhirBatchServiceBase): - def setUp(self): super().setUp() self.mock_repo = create_autospec(ImmunizationBatchRepository) @@ -170,7 +168,6 @@ def test_update_immunization_post_validation_error(self): class TestDeleteImmunizationBatchService(unittest.TestCase): - def setUp(self): self.mock_repo = create_autospec(ImmunizationBatchRepository) self.mock_validator = create_autospec(ImmunizationValidator) diff --git a/backend/tests/service/test_fhir_service.py b/backend/tests/service/test_fhir_service.py index 66ddebf3a..658046163 100644 --- a/backend/tests/service/test_fhir_service.py +++ b/backend/tests/service/test_fhir_service.py @@ -5,33 +5,32 @@ import uuid from copy import deepcopy from decimal import Decimal -from unittest.mock import MagicMock -from unittest.mock import create_autospec, patch - -from fhir.resources.R4B.bundle import Bundle as FhirBundle, BundleEntry -from fhir.resources.R4B.immunization import Immunization -from pydantic import ValidationError -from pydantic.error_wrappers import ErrorWrapper +from unittest.mock import MagicMock, create_autospec, patch from authorisation.api_operation_code import ApiOperationCode from authorisation.authoriser import Authoriser from constants import NHS_NUMBER_USED_IN_SAMPLE_DATA +from fhir.resources.R4B.bundle import Bundle as FhirBundle +from 
fhir.resources.R4B.bundle import BundleEntry +from fhir.resources.R4B.immunization import Immunization from models.errors import ( - InvalidPatientId, CustomValidationError, - UnauthorizedVaxError, + InvalidPatientId, ResourceNotFoundError, + UnauthorizedVaxError, ) from models.fhir_immunization import ImmunizationValidator from models.utils.generic_utils import get_contained_patient +from pydantic import ValidationError +from pydantic.error_wrappers import ErrorWrapper from repository.fhir_repository import ImmunizationRepository from service.fhir_service import FhirService, UpdateOutcome, get_service_url from testing_utils.generic_utils import load_json_data from testing_utils.immunization_utils import ( + VALID_NHS_NUMBER, create_covid_19_immunization, create_covid_19_immunization_dict, create_covid_19_immunization_dict_no_id, - VALID_NHS_NUMBER, ) @@ -51,7 +50,6 @@ def tearDown(self): class TestServiceUrl(unittest.TestCase): - def setUp(self): self.logger_info_patcher = patch("logging.Logger.info") self.mock_logger_info = self.logger_info_patcher.start() @@ -379,11 +377,14 @@ def test_get_immunization_by_identifier(self): mock_resource = create_covid_19_immunization_dict(identifier) self.authoriser.authorise.return_value = True - self.imms_repo.get_immunization_by_identifier.return_value = { - "resource": mock_resource, - "id": imms_id, - "version": 1, - }, "covid19" + self.imms_repo.get_immunization_by_identifier.return_value = ( + { + "resource": mock_resource, + "id": imms_id, + "version": 1, + }, + "covid19", + ) # When service_resp = self.fhir_service.get_immunization_by_identifier( @@ -409,10 +410,13 @@ def test_get_immunization_by_identifier_raises_error_when_not_authorised(self): identifier = "test" element = "id,mEta,DDD" self.authoriser.authorise.return_value = False - self.imms_repo.get_immunization_by_identifier.return_value = { - "id": "foo", - "version": 1, - }, "covid19" + self.imms_repo.get_immunization_by_identifier.return_value = ( + { + "id": "foo", + "version": 1, + }, + "covid19", + ) with self.assertRaises(UnauthorizedVaxError): # When diff --git a/backend/tests/test_api_errors.py b/backend/tests/test_api_errors.py index e10618998..6474dd35d 100644 --- a/backend/tests/test_api_errors.py +++ b/backend/tests/test_api_errors.py @@ -1,6 +1,6 @@ import unittest -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome "test" diff --git a/backend/tests/test_create_imms.py b/backend/tests/test_create_imms.py index d34a433ba..6f1abc0a9 100644 --- a/backend/tests/test_create_imms.py +++ b/backend/tests/test_create_imms.py @@ -5,7 +5,7 @@ from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE from controller.fhir_controller import FhirController from create_imms_handler import create_immunization -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome class TestCreateImmunizationById(unittest.TestCase): diff --git a/backend/tests/test_delete_imms.py b/backend/tests/test_delete_imms.py index 34b0c434a..acc646582 100644 --- a/backend/tests/test_delete_imms.py +++ b/backend/tests/test_delete_imms.py @@ -5,7 +5,7 @@ from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE from controller.fhir_controller import FhirController from delete_imms_handler import delete_immunization -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome 
class TestDeleteImmunizationById(unittest.TestCase): diff --git a/backend/tests/test_filter.py b/backend/tests/test_filter.py index d2148ddb1..40f6eec25 100644 --- a/backend/tests/test_filter.py +++ b/backend/tests/test_filter.py @@ -7,9 +7,9 @@ from constants import Urls from filter import ( Filter, - remove_reference_to_contained_practitioner, - create_reference_to_patient_resource, add_use_to_identifier, + create_reference_to_patient_resource, + remove_reference_to_contained_practitioner, replace_address_postal_codes, replace_organization_values, ) diff --git a/backend/tests/test_forwarding_batch_lambda.py b/backend/tests/test_forwarding_batch_lambda.py index 3b33b56af..d1bcb294b 100644 --- a/backend/tests/test_forwarding_batch_lambda.py +++ b/backend/tests/test_forwarding_batch_lambda.py @@ -5,32 +5,30 @@ import unittest from typing import Optional from unittest import TestCase -from unittest.mock import patch, MagicMock, ANY +from unittest.mock import ANY, MagicMock, patch from boto3 import resource as boto3_resource -from moto import mock_aws - from models.errors import ( - MessageNotSuccessfulError, - RecordProcessorError, CustomValidationError, IdentifierDuplicationError, - ResourceNotFoundError, + MessageNotSuccessfulError, + RecordProcessorError, ResourceFoundError, + ResourceNotFoundError, ) +from moto import mock_aws from testing_utils.test_utils_for_batch import ForwarderValues, MockFhirImmsResources with patch.dict("os.environ", ForwarderValues.MOCK_ENVIRONMENT_DICT): from forwarding_batch_lambda import ( - forward_lambda_handler, create_diagnostics_dictionary, + forward_lambda_handler, ) @mock_aws @patch.dict(os.environ, ForwarderValues.MOCK_ENVIRONMENT_DICT) class TestForwardLambdaHandler(TestCase): - def setUp(self): """Set up dynamodb table test values to be used for the tests""" self.dynamodb_resource = boto3_resource("dynamodb", "eu-west-2") diff --git a/backend/tests/test_immunization_post_validator.py b/backend/tests/test_immunization_post_validator.py index f04e1226e..fea868789 100644 --- a/backend/tests/test_immunization_post_validator.py +++ b/backend/tests/test_immunization_post_validator.py @@ -5,15 +5,16 @@ from unittest.mock import patch from jsonpath_ng.ext import parse -from pydantic import ValidationError - from models.fhir_immunization import ImmunizationValidator +from pydantic import ValidationError +from testing_utils.generic_utils import ( + load_json_data, + update_contained_resource_field, +) from testing_utils.generic_utils import ( # these have an underscore to avoid pytest collecting them as tests test_invalid_values_rejected as _test_invalid_values_rejected, - load_json_data, ) -from testing_utils.generic_utils import update_contained_resource_field from testing_utils.mandation_test_utils import MandationTests from testing_utils.values_for_tests import NameInstances @@ -570,9 +571,9 @@ def test_post_pre_validate_extension_url(self): self.mock_redis_client.hget.side_effect = None self.mock_redis_client.hget.return_value = "COVID19" invalid_json_data = deepcopy(self.completed_json_data["COVID19"]) - invalid_json_data["extension"][0]["valueCodeableConcept"]["coding"][0][ - "system" - ] = "https://xyz/Extension-UKCore-VaccinationProcedure" + invalid_json_data["extension"][0]["valueCodeableConcept"]["coding"][0]["system"] = ( + "https://xyz/Extension-UKCore-VaccinationProcedure" + ) with self.assertRaises(Exception) as error: self.validator.validate(invalid_json_data) diff --git a/backend/tests/test_immunization_pre_validator.py 
b/backend/tests/test_immunization_pre_validator.py index a259106c4..1bafd2994 100644 --- a/backend/tests/test_immunization_pre_validator.py +++ b/backend/tests/test_immunization_pre_validator.py @@ -6,24 +6,27 @@ from unittest.mock import patch from jsonpath_ng.ext import parse - from models.fhir_immunization import ImmunizationValidator from models.fhir_immunization_pre_validators import PreValidators -from models.utils.generic_utils import get_generic_extension_value from models.utils.generic_utils import ( - patient_name_given_field_location, + get_generic_extension_value, patient_name_family_field_location, - practitioner_name_given_field_location, + patient_name_given_field_location, practitioner_name_family_field_location, + practitioner_name_given_field_location, +) +from testing_utils.generic_utils import ( + load_json_data, +) +from testing_utils.generic_utils import ( + test_invalid_values_rejected as _test_invalid_values_rejected, ) from testing_utils.generic_utils import ( # these have an underscore to avoid pytest collecting them as tests test_valid_values_accepted as _test_valid_values_accepted, - test_invalid_values_rejected as _test_invalid_values_rejected, - load_json_data, ) from testing_utils.pre_validation_test_utils import ValidatorModelTests -from testing_utils.values_for_tests import ValidValues, InvalidValues +from testing_utils.values_for_tests import InvalidValues, ValidValues class TestImmunizationModelPreValidationRules(unittest.TestCase): diff --git a/backend/tests/test_lambda_handler.py b/backend/tests/test_lambda_handler.py index 9ea8bc62d..9cc39317e 100644 --- a/backend/tests/test_lambda_handler.py +++ b/backend/tests/test_lambda_handler.py @@ -19,5 +19,4 @@ def test_unsupported_method(self): if __name__ == "__main__": - unittest.main() diff --git a/backend/tests/test_log_structure_wrapper.py b/backend/tests/test_log_structure_wrapper.py index 5b0838563..f0e4afd66 100644 --- a/backend/tests/test_log_structure_wrapper.py +++ b/backend/tests/test_log_structure_wrapper.py @@ -8,7 +8,6 @@ @patch("log_structure.firehose_logger") @patch("log_structure.logger") class TestFunctionInfoWrapper(unittest.TestCase): - def setUp(self): self.redis_patcher = patch("models.utils.validation_utils.redis_client") self.mock_redis_client = self.redis_patcher.start() diff --git a/backend/tests/test_parameter_parser.py b/backend/tests/test_parameter_parser.py index 8b062f147..0d68af685 100644 --- a/backend/tests/test_parameter_parser.py +++ b/backend/tests/test_parameter_parser.py @@ -5,13 +5,13 @@ from models.errors import ParameterException from parameter_parser import ( + SearchParams, + create_query_string, date_from_key, date_to_key, + include_key, process_params, process_search_params, - create_query_string, - include_key, - SearchParams, ) from service.fhir_service import FhirService diff --git a/backend/tests/test_search_imms.py b/backend/tests/test_search_imms.py index 0041719c3..1ef035c87 100644 --- a/backend/tests/test_search_imms.py +++ b/backend/tests/test_search_imms.py @@ -5,7 +5,7 @@ from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE from controller.fhir_controller import FhirController -from models.errors import Severity, Code, create_operation_outcome +from models.errors import Code, Severity, create_operation_outcome from search_imms_handler import search_imms script_location = Path(__file__).absolute().parent diff --git a/backend/tests/test_validation_utils.py b/backend/tests/test_validation_utils.py index c5914b2a1..d8e3c730e 100644 --- 
a/backend/tests/test_validation_utils.py +++ b/backend/tests/test_validation_utils.py @@ -2,19 +2,18 @@ from copy import deepcopy from jsonpath_ng.ext import parse - from models.fhir_immunization import ImmunizationValidator from models.obtain_field_value import ObtainFieldValue from models.utils.generic_utils import ( get_current_name_instance, obtain_current_name_period, - patient_and_practitioner_value_and_index, obtain_name_field_location, + patient_and_practitioner_value_and_index, ) from testing_utils.generic_utils import ( load_json_data, ) -from testing_utils.values_for_tests import ValidValues, InvalidValues, NameInstances +from testing_utils.values_for_tests import InvalidValues, NameInstances, ValidValues class TestValidatorUtils(unittest.TestCase): diff --git a/backend/tests/testing_utils/generic_utils.py b/backend/tests/testing_utils/generic_utils.py index e8aff0ea4..4f00e36ec 100644 --- a/backend/tests/testing_utils/generic_utils.py +++ b/backend/tests/testing_utils/generic_utils.py @@ -3,10 +3,9 @@ import json import os import unittest -from datetime import datetime, date +from datetime import date, datetime from decimal import Decimal -from typing import Literal, Any -from typing import Union, List +from typing import Any, List, Literal, Union from jsonpath_ng.ext import parse diff --git a/backend/tests/testing_utils/immunization_utils.py b/backend/tests/testing_utils/immunization_utils.py index f25f69a22..cd69b1ba8 100644 --- a/backend/tests/testing_utils/immunization_utils.py +++ b/backend/tests/testing_utils/immunization_utils.py @@ -1,7 +1,6 @@ """Immunization utils.""" from fhir.resources.R4B.immunization import Immunization - from testing_utils.generic_utils import load_json_data from testing_utils.values_for_tests import ValidValues @@ -21,9 +20,9 @@ def create_covid_19_immunization_dict( immunization_json = load_json_data("completed_covid19_immunization_event.json") immunization_json["id"] = imms_id - [x for x in immunization_json["contained"] if x.get("resourceType") == "Patient"][0]["identifier"][0][ - "value" - ] = nhs_number + [x for x in immunization_json["contained"] if x.get("resourceType") == "Patient"][0]["identifier"][0]["value"] = ( + nhs_number + ) immunization_json["occurrenceDateTime"] = occurrence_date_time @@ -35,9 +34,9 @@ def create_covid_19_immunization_dict_no_id( ): immunization_json = load_json_data("completed_covid19_immunization_event.json") - [x for x in immunization_json["contained"] if x.get("resourceType") == "Patient"][0]["identifier"][0][ - "value" - ] = nhs_number + [x for x in immunization_json["contained"] if x.get("resourceType") == "Patient"][0]["identifier"][0]["value"] = ( + nhs_number + ) immunization_json["occurrenceDateTime"] = occurrence_date_time diff --git a/backend/tests/testing_utils/pre_validation_test_utils.py b/backend/tests/testing_utils/pre_validation_test_utils.py index 9f8d5f36b..2abd1c0ac 100644 --- a/backend/tests/testing_utils/pre_validation_test_utils.py +++ b/backend/tests/testing_utils/pre_validation_test_utils.py @@ -6,8 +6,8 @@ from jsonpath_ng.ext import parse from .generic_utils import ( - test_valid_values_accepted, test_invalid_values_rejected, + test_valid_values_accepted, ) from .values_for_tests import InvalidDataTypes, InvalidValues, ValidValues diff --git a/backend/tests/testing_utils/test_utils_for_batch.py b/backend/tests/testing_utils/test_utils_for_batch.py index ce7ee25ef..ce5c57955 100644 --- a/backend/tests/testing_utils/test_utils_for_batch.py +++ 
b/backend/tests/testing_utils/test_utils_for_batch.py @@ -2,7 +2,6 @@ class ForwarderValues: - MOCK_ENVIRONMENT_DICT = { "DYNAMODB_TABLE_NAME": "immunisation-batch-internal-dev-imms-test-table", "ENVIRONMENT": "internal-dev-test", diff --git a/backend/tests/utils/test_dict_utils.py b/backend/tests/utils/test_dict_utils.py index 718fac210..161c632c4 100644 --- a/backend/tests/utils/test_dict_utils.py +++ b/backend/tests/utils/test_dict_utils.py @@ -4,7 +4,6 @@ class TestDictUtils(unittest.TestCase): - def test_get_field_returns_none_if_value_is_not_dict(self): """Test that the default None value is returned if the provided argument is not a dict""" result = dict_utils.get_field(["test"], "test_key") diff --git a/batch_processor_filter/src/batch_audit_repository.py b/batch_processor_filter/src/batch_audit_repository.py index 4f0f3ef4a..f3e15e751 100644 --- a/batch_processor_filter/src/batch_audit_repository.py +++ b/batch_processor_filter/src/batch_audit_repository.py @@ -1,13 +1,12 @@ import boto3 from boto3.dynamodb.conditions import Key - from constants import ( + AUDIT_TABLE_FILENAME_GSI, AUDIT_TABLE_NAME, + AUDIT_TABLE_QUEUE_NAME_GSI, REGION_NAME, - AUDIT_TABLE_FILENAME_GSI, AuditTableKeys, FileStatus, - AUDIT_TABLE_QUEUE_NAME_GSI, ) diff --git a/batch_processor_filter/src/batch_file_repository.py b/batch_processor_filter/src/batch_file_repository.py index 239016e10..e30746cbb 100644 --- a/batch_processor_filter/src/batch_file_repository.py +++ b/batch_processor_filter/src/batch_file_repository.py @@ -1,12 +1,11 @@ """Module for the batch file repository""" from csv import writer -from io import StringIO, BytesIO +from io import BytesIO, StringIO import boto3 - from batch_file_created_event import BatchFileCreatedEvent -from constants import SOURCE_BUCKET_NAME, ACK_BUCKET_NAME +from constants import ACK_BUCKET_NAME, SOURCE_BUCKET_NAME class BatchFileRepository: diff --git a/batch_processor_filter/src/batch_processor_filter_service.py b/batch_processor_filter/src/batch_processor_filter_service.py index 7f4bcfe0b..988e652b1 100644 --- a/batch_processor_filter/src/batch_processor_filter_service.py +++ b/batch_processor_filter/src/batch_processor_filter_service.py @@ -1,14 +1,12 @@ """Batch processor filter service module""" -import boto3 import json +import boto3 from batch_audit_repository import BatchAuditRepository - from batch_file_created_event import BatchFileCreatedEvent from batch_file_repository import BatchFileRepository - -from constants import REGION_NAME, FileStatus, QUEUE_URL, FileNotProcessedReason +from constants import QUEUE_URL, REGION_NAME, FileNotProcessedReason, FileStatus from exceptions import EventAlreadyProcessingForSupplierAndVaccTypeError from logger import logger from send_log_to_firehose import send_log_to_firehose diff --git a/batch_processor_filter/src/lambda_handler.py b/batch_processor_filter/src/lambda_handler.py index b55a875d7..0ba0ca202 100644 --- a/batch_processor_filter/src/lambda_handler.py +++ b/batch_processor_filter/src/lambda_handler.py @@ -1,12 +1,11 @@ import json -from aws_lambda_typing import events, context +from aws_lambda_typing import context, events from batch_file_created_event import BatchFileCreatedEvent from batch_processor_filter_service import BatchProcessorFilterService from exception_decorator import exception_decorator from exceptions import InvalidBatchSizeError - service = BatchProcessorFilterService() diff --git a/batch_processor_filter/src/logger.py b/batch_processor_filter/src/logger.py index c6e5a24c4..47e9168b3 100644 --- 
a/batch_processor_filter/src/logger.py +++ b/batch_processor_filter/src/logger.py @@ -2,7 +2,6 @@ import logging - logging.basicConfig(level="INFO") logger = logging.getLogger() logger.setLevel("INFO") diff --git a/batch_processor_filter/src/send_log_to_firehose.py b/batch_processor_filter/src/send_log_to_firehose.py index a89e181cc..be04327b7 100644 --- a/batch_processor_filter/src/send_log_to_firehose.py +++ b/batch_processor_filter/src/send_log_to_firehose.py @@ -1,7 +1,7 @@ import json -import boto3 -from constants import SPLUNK_FIREHOSE_STREAM_NAME, REGION_NAME +import boto3 +from constants import REGION_NAME, SPLUNK_FIREHOSE_STREAM_NAME firehose_client = boto3.client("firehose", region_name=REGION_NAME) diff --git a/batch_processor_filter/tests/test_lambda_handler.py b/batch_processor_filter/tests/test_lambda_handler.py index 16d0c758d..69a35b90b 100644 --- a/batch_processor_filter/tests/test_lambda_handler.py +++ b/batch_processor_filter/tests/test_lambda_handler.py @@ -1,36 +1,34 @@ +import copy import json from json import JSONDecodeError - -import boto3 -import copy from unittest import TestCase -from unittest.mock import patch, Mock, ANY, call +from unittest.mock import ANY, Mock, call, patch +import boto3 import botocore -from moto import mock_aws - from batch_file_created_event import BatchFileCreatedEvent from exceptions import ( - InvalidBatchSizeError, EventAlreadyProcessingForSupplierAndVaccTypeError, + InvalidBatchSizeError, ) +from moto import mock_aws from testing_utils import ( MOCK_ENVIRONMENT_DICT, - make_sqs_record, add_entry_to_mock_table, get_audit_entry_status_by_id, + make_sqs_record, ) with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from lambda_handler import lambda_handler from constants import ( + AUDIT_TABLE_FILENAME_GSI, AUDIT_TABLE_NAME, + AUDIT_TABLE_QUEUE_NAME_GSI, REGION_NAME, AuditTableKeys, - AUDIT_TABLE_FILENAME_GSI, - AUDIT_TABLE_QUEUE_NAME_GSI, FileStatus, ) + from lambda_handler import lambda_handler sqs_client = boto3.client("sqs", region_name=REGION_NAME) dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME) @@ -123,7 +121,7 @@ def _assert_source_file_moved(self, filename: str): self.assertEqual( str(exc.exception), - "An error occurred (NoSuchKey) when calling the GetObject " "operation: The specified key does not exist.", + "An error occurred (NoSuchKey) when calling the GetObject operation: The specified key does not exist.", ) archived_object = s3_client.get_object(Bucket=self.mock_source_bucket, Key=f"archive/{filename}") self.assertIsNotNone(archived_object) @@ -287,7 +285,7 @@ def test_lambda_handler_processes_event_successfully(self): ) expected_success_log_message = ( - f"File forwarded for processing by ECS. Filename: " f"{self.default_batch_file_event['filename']}" + f"File forwarded for processing by ECS. 
Filename: {self.default_batch_file_event['filename']}" ) self.mock_logger.info.assert_has_calls( [ diff --git a/batch_processor_filter/tests/testing_utils.py b/batch_processor_filter/tests/testing_utils.py index 604a9c973..bae2a8201 100644 --- a/batch_processor_filter/tests/testing_utils.py +++ b/batch_processor_filter/tests/testing_utils.py @@ -3,7 +3,6 @@ from aws_lambda_typing.events.sqs import SQSMessage from batch_file_created_event import BatchFileCreatedEvent - MOCK_ENVIRONMENT_DICT = { "AUDIT_TABLE_NAME": "immunisation-batch-internal-dev-audit-table", "QUEUE_URL": "https://sqs.eu-west-2.amazonaws.com/123456789012/imms-batch-metadata-queue.fifo", @@ -32,7 +31,7 @@ def add_entry_to_mock_table( """Add an entry to the audit table""" audit_table_entry = { "message_id": {"S": batch_file_created_event.get("message_id")}, - "queue_name": {"S": f'{batch_file_created_event["supplier"]}_{batch_file_created_event["vaccine_type"]}'}, + "queue_name": {"S": f"{batch_file_created_event['supplier']}_{batch_file_created_event['vaccine_type']}"}, "filename": {"S": batch_file_created_event.get("filename")}, "status": {"S": status}, } diff --git a/delta_backend/src/converter.py b/delta_backend/src/converter.py index 80c0510e3..830f8571d 100644 --- a/delta_backend/src/converter.py +++ b/delta_backend/src/converter.py @@ -1,12 +1,11 @@ # Main validation engine import exception_messages from common.mappings import ActionFlag -from conversion_layout import ConversionLayout, ConversionField +from conversion_layout import ConversionField, ConversionLayout from extractor import Extractor class Converter: - def __init__(self, fhir_data, action_flag=ActionFlag.UPDATE, report_unexpected_exception=True): self.converted = {} self.error_records = [] diff --git a/delta_backend/src/delta.py b/delta_backend/src/delta.py index 155772363..4c2eebddf 100644 --- a/delta_backend/src/delta.py +++ b/delta_backend/src/delta.py @@ -3,13 +3,12 @@ import logging import os import time -from datetime import datetime, timedelta, UTC +from datetime import UTC, datetime, timedelta import boto3 from boto3.dynamodb.conditions import Attr from botocore.exceptions import ClientError - -from common.mappings import ActionFlag, Operation, EventName +from common.mappings import ActionFlag, EventName, Operation from converter import Converter from log_firehose import FirehoseLogger diff --git a/delta_backend/src/extractor.py b/delta_backend/src/extractor.py index 02d6e5404..8c40bbf6d 100644 --- a/delta_backend/src/extractor.py +++ b/delta_backend/src/extractor.py @@ -3,11 +3,10 @@ from datetime import datetime, timedelta, timezone import exception_messages -from common.mappings import Gender, ConversionFieldName +from common.mappings import ConversionFieldName, Gender class Extractor: - # This file holds the schema/base layout that maps FHIR fields to flat JSON fields # Each entry tells the converter how to extract and transform a specific value EXTENSION_URL_VACCINATION_PRODEDURE = ( @@ -40,7 +39,6 @@ def _get_patient(self): ) def _get_valid_names(self, names, occurrence_time): - official_names = [n for n in names if n.get("use") == "official" and self._is_current_period(n, occurrence_time)] if official_names: return official_names[0] diff --git a/delta_backend/src/log_firehose.py b/delta_backend/src/log_firehose.py index c5c8134f4..66d2000e6 100644 --- a/delta_backend/src/log_firehose.py +++ b/delta_backend/src/log_firehose.py @@ -2,7 +2,6 @@ import logging import os - import boto3 from botocore.config import Config diff --git 
a/delta_backend/tests/check_conversion.py b/delta_backend/tests/check_conversion.py index ff4918a7e..698a9c30b 100644 --- a/delta_backend/tests/check_conversion.py +++ b/delta_backend/tests/check_conversion.py @@ -2,8 +2,8 @@ import json import os import sys -from converter import Converter +from converter import Converter sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "src"))) diff --git a/delta_backend/tests/test_convert.py b/delta_backend/tests/test_convert.py index 08c05d7f6..6eaaca3cb 100644 --- a/delta_backend/tests/test_convert.py +++ b/delta_backend/tests/test_convert.py @@ -4,10 +4,11 @@ from copy import deepcopy from datetime import datetime from unittest.mock import patch -from moto import mock_aws + from boto3 import resource as boto3_resource -from utils_for_converter_tests import ValuesForTests, ErrorValuesForTests -from common.mappings import ActionFlag, Operation, EventName +from common.mappings import ActionFlag, EventName, Operation +from moto import mock_aws +from utils_for_converter_tests import ErrorValuesForTests, ValuesForTests MOCK_ENV_VARS = { "AWS_SQS_QUEUE_URL": "https://sqs.eu-west-2.amazonaws.com/123456789012/test-queue", @@ -17,7 +18,7 @@ } request_json_data = ValuesForTests.json_data with patch.dict("os.environ", MOCK_ENV_VARS): - from delta import handler, Converter + from delta import Converter, handler @patch.dict("os.environ", MOCK_ENV_VARS, clear=True) @@ -214,7 +215,6 @@ def test_handler_imms_convert_to_flat_json(self): for test_case in expected_action_flags: with self.subTest(test_case["Operation"]): - event = self.get_event(operation=test_case["Operation"]) response = handler(event, None) diff --git a/delta_backend/tests/test_convert_dates.py b/delta_backend/tests/test_convert_dates.py index 15104f3ea..c3e859761 100644 --- a/delta_backend/tests/test_convert_dates.py +++ b/delta_backend/tests/test_convert_dates.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestDateConversions(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_dose_amount.py b/delta_backend/tests/test_convert_dose_amount.py index e359c55c9..b3a04c18b 100644 --- a/delta_backend/tests/test_convert_dose_amount.py +++ b/delta_backend/tests/test_convert_dose_amount.py @@ -2,13 +2,13 @@ import decimal import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestDoseAmountTypeUriToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_dose_sequence.py b/delta_backend/tests/test_convert_dose_sequence.py index 99e1bc54d..c06535ba3 100644 --- a/delta_backend/tests/test_convert_dose_sequence.py +++ b/delta_backend/tests/test_convert_dose_sequence.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests 
class TestDoseSequenceToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_location_code.py b/delta_backend/tests/test_convert_location_code.py index 0dab0ed83..378cb7438 100644 --- a/delta_backend/tests/test_convert_location_code.py +++ b/delta_backend/tests/test_convert_location_code.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestLocationCode(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_location_code_type_uri.py b/delta_backend/tests/test_convert_location_code_type_uri.py index 718697109..0e03c49f3 100644 --- a/delta_backend/tests/test_convert_location_code_type_uri.py +++ b/delta_backend/tests/test_convert_location_code_type_uri.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestLocationCodeTypeUri(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_lot_number.py b/delta_backend/tests/test_convert_lot_number.py index 95ac7d6e4..c2a0bdbd5 100644 --- a/delta_backend/tests/test_convert_lot_number.py +++ b/delta_backend/tests/test_convert_lot_number.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestBatchNumber(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_manufacturer.py b/delta_backend/tests/test_convert_manufacturer.py index 5089abbd4..f97a50a6d 100644 --- a/delta_backend/tests/test_convert_manufacturer.py +++ b/delta_backend/tests/test_convert_manufacturer.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestVaccineManufacturer(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_nhs_number.py b/delta_backend/tests/test_convert_nhs_number.py index 57fb29198..9681d99b5 100644 --- a/delta_backend/tests/test_convert_nhs_number.py +++ b/delta_backend/tests/test_convert_nhs_number.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestNHSNumberToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git 
a/delta_backend/tests/test_convert_person_forename.py b/delta_backend/tests/test_convert_person_forename.py index e813eba8b..b42b844e1 100644 --- a/delta_backend/tests/test_convert_person_forename.py +++ b/delta_backend/tests/test_convert_person_forename.py @@ -1,9 +1,10 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPersonForenmeToFlatJson(unittest.TestCase): diff --git a/delta_backend/tests/test_convert_person_gender.py b/delta_backend/tests/test_convert_person_gender.py index 064c1e7b9..548ead88f 100644 --- a/delta_backend/tests/test_convert_person_gender.py +++ b/delta_backend/tests/test_convert_person_gender.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPersonGenderToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_person_surname.py b/delta_backend/tests/test_convert_person_surname.py index 8d1331abe..a485d2266 100644 --- a/delta_backend/tests/test_convert_person_surname.py +++ b/delta_backend/tests/test_convert_person_surname.py @@ -1,9 +1,10 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPersonSurnameToFlatJson(unittest.TestCase): diff --git a/delta_backend/tests/test_convert_post_code.py b/delta_backend/tests/test_convert_post_code.py index a07dc6d63..e8958b8c1 100644 --- a/delta_backend/tests/test_convert_post_code.py +++ b/delta_backend/tests/test_convert_post_code.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPersonPostalCodeToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_practitioner_forename.py b/delta_backend/tests/test_convert_practitioner_forename.py index 38edc88b9..e5a7aba90 100644 --- a/delta_backend/tests/test_convert_practitioner_forename.py +++ b/delta_backend/tests/test_convert_practitioner_forename.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPractitionerForenameToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_practitioner_surname.py b/delta_backend/tests/test_convert_practitioner_surname.py index b8bf1919b..d7e820aa9 100644 --- a/delta_backend/tests/test_convert_practitioner_surname.py +++ b/delta_backend/tests/test_convert_practitioner_surname.py @@ 
-1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPractitionerSurnameToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_primary_source.py b/delta_backend/tests/test_convert_primary_source.py index e5b94b261..f52e70953 100644 --- a/delta_backend/tests/test_convert_primary_source.py +++ b/delta_backend/tests/test_convert_primary_source.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestPrimarySourceFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_site_code.py b/delta_backend/tests/test_convert_site_code.py index 5eab55b0b..68dd1b2dc 100644 --- a/delta_backend/tests/test_convert_site_code.py +++ b/delta_backend/tests/test_convert_site_code.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestSiteCodeToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_site_uri.py b/delta_backend/tests/test_convert_site_uri.py index 6d29b25e3..d5ddbff8e 100644 --- a/delta_backend/tests/test_convert_site_uri.py +++ b/delta_backend/tests/test_convert_site_uri.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestSiteUriToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_snomed_codes.py b/delta_backend/tests/test_convert_snomed_codes.py index 593eb08b4..367a9f189 100644 --- a/delta_backend/tests/test_convert_snomed_codes.py +++ b/delta_backend/tests/test_convert_snomed_codes.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestSNOMEDToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_snomed_terms.py b/delta_backend/tests/test_convert_snomed_terms.py index 057e983f0..4d4882ac0 100644 --- a/delta_backend/tests/test_convert_snomed_terms.py +++ b/delta_backend/tests/test_convert_snomed_terms.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from 
converter import Converter +from utils_for_converter_tests import ValuesForTests class TestSNOMEDTermsToFlatJson(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_unique_id.py b/delta_backend/tests/test_convert_unique_id.py index 277118108..e7a7be93f 100644 --- a/delta_backend/tests/test_convert_unique_id.py +++ b/delta_backend/tests/test_convert_unique_id.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestUniqueId(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_convert_unique_id_uri.py b/delta_backend/tests/test_convert_unique_id_uri.py index 379087f2f..ae5447811 100644 --- a/delta_backend/tests/test_convert_unique_id_uri.py +++ b/delta_backend/tests/test_convert_unique_id_uri.py @@ -1,13 +1,13 @@ import copy import json import unittest -from utils_for_converter_tests import ValuesForTests -from converter import Converter + from common.mappings import ConversionFieldName +from converter import Converter +from utils_for_converter_tests import ValuesForTests class TestUniqueIdUri(unittest.TestCase): - def setUp(self): self.request_json_data = copy.deepcopy(ValuesForTests.json_data) diff --git a/delta_backend/tests/test_current_period.py b/delta_backend/tests/test_current_period.py index f4b75ec57..c8dd60bc7 100644 --- a/delta_backend/tests/test_current_period.py +++ b/delta_backend/tests/test_current_period.py @@ -1,5 +1,6 @@ import unittest from datetime import datetime, timezone + from extractor import Extractor diff --git a/delta_backend/tests/test_delta.py b/delta_backend/tests/test_delta.py index 7add6fe1b..7df127c7f 100644 --- a/delta_backend/tests/test_delta.py +++ b/delta_backend/tests/test_delta.py @@ -1,17 +1,18 @@ -import unittest -from unittest.mock import patch, MagicMock -from botocore.exceptions import ClientError -import os -import json import decimal -from common.mappings import EventName, Operation, ActionFlag -from utils_for_converter_tests import ValuesForTests, RecordConfig +import json +import os +import unittest +from unittest.mock import MagicMock, patch + import delta +from botocore.exceptions import ClientError +from common.mappings import ActionFlag, EventName, Operation from delta import ( - send_message, handler, process_record, + send_message, ) +from utils_for_converter_tests import RecordConfig, ValuesForTests TEST_QUEUE_URL = "https://sqs.eu-west-2.amazonaws.com/123456789012/test-queue" os.environ["AWS_SQS_QUEUE_URL"] = TEST_QUEUE_URL @@ -26,7 +27,6 @@ class DeltaHandlerTestCase(unittest.TestCase): - # TODO refactor for dependency injection, eg process_record, send_firehose etc def setUp(self): self.logger_info_patcher = patch("logging.Logger.info") @@ -501,7 +501,6 @@ def test_handler_sends_all_to_firehose(self, mock_send_firehose, mock_process_re class DeltaRecordProcessorTestCase(unittest.TestCase): - def setUp(self): self.logger_info_patcher = patch("logging.Logger.info") self.mock_logger_info = self.logger_info_patcher.start() @@ -525,7 +524,6 @@ def tearDown(self): self.delta_table_patcher.stop() def test_multi_record_success(self): - # Arrange self.mock_delta_table.put_item.return_value = SUCCESS_RESPONSE test_configs = [ 
@@ -557,7 +555,6 @@ def test_multi_record_success(self): self.assertEqual(self.mock_logger_warning.call_count, 0) def test_multi_record_success_with_fail(self): - # Arrange expected_returns = [True, False, True] self.mock_delta_table.put_item.side_effect = [ @@ -589,7 +586,6 @@ def test_multi_record_success_with_fail(self): self.assertEqual(self.mock_logger_error.call_count, 1) def test_single_record_table_exception(self): - # Arrange imms_id = "exception-id" record = ValuesForTests.get_event_record( @@ -613,7 +609,6 @@ def test_single_record_table_exception(self): @patch("delta.json.loads") def test_json_loads_called_with_parse_float_decimal(self, mock_json_loads): - # Arrange record = ValuesForTests.get_event_record(imms_id="id", event_name=EventName.UPDATE, operation=Operation.UPDATE) @@ -640,7 +635,6 @@ def tearDown(self): self.logger_error_patcher.stop() def test_returns_table_on_success(self): - table = delta.get_delta_table() self.assertIs(table, self.mock_delta_table) # Should cache the table diff --git a/delta_backend/tests/test_log_firehose.py b/delta_backend/tests/test_log_firehose.py index 6df02bf87..6e90f3436 100644 --- a/delta_backend/tests/test_log_firehose.py +++ b/delta_backend/tests/test_log_firehose.py @@ -1,11 +1,11 @@ -import unittest -from unittest.mock import patch, MagicMock import json +import unittest +from unittest.mock import MagicMock, patch + from log_firehose import FirehoseLogger class TestFirehoseLogger(unittest.TestCase): - def setUp(self): # Common setup if needed self.context = {} diff --git a/delta_backend/tests/test_utils.py b/delta_backend/tests/test_utils.py index 0fcacb61a..a0a2f483e 100644 --- a/delta_backend/tests/test_utils.py +++ b/delta_backend/tests/test_utils.py @@ -1,9 +1,9 @@ import unittest + from utils import is_valid_simple_snomed class TestIsValidSimpleSnomed(unittest.TestCase): - def test_valid_snomed(self): valid_snomed = "956951000000104" self.assertTrue(is_valid_simple_snomed(valid_snomed)) diff --git a/delta_backend/tests/utils_for_converter_tests.py b/delta_backend/tests/utils_for_converter_tests.py index 6919650be..d956d6a6e 100644 --- a/delta_backend/tests/utils_for_converter_tests.py +++ b/delta_backend/tests/utils_for_converter_tests.py @@ -1,9 +1,10 @@ +import json import uuid from decimal import Decimal -import json -from common.mappings import EventName, Operation from typing import List +from common.mappings import EventName, Operation + class RecordConfig: def __init__(self, event_name, operation, imms_id, expected_action_flag=None, supplier="EMIS"): @@ -15,7 +16,6 @@ def __init__(self, event_name, operation, imms_id, expected_action_flag=None, su class ValuesForTests: - MOCK_ENVIRONMENT_DICT = { "DYNAMODB_TABLE_NAME": "immunisation-batch-internal-dev-imms-test-table", "ENVIRONMENT": "internal-dev-test", @@ -346,7 +346,6 @@ def get_expected_imms(expected_action_flag): class ErrorValuesForTests: - json_dob_error = { "resourceType": "Immunization", "contained": [ diff --git a/e2e/lib/apigee.py b/e2e/lib/apigee.py index ba3f757cd..750b55701 100644 --- a/e2e/lib/apigee.py +++ b/e2e/lib/apigee.py @@ -1,5 +1,5 @@ import inspect -from dataclasses import dataclass, field, asdict +from dataclasses import asdict, dataclass, field from enum import StrEnum from typing import List diff --git a/e2e/lib/authentication.py b/e2e/lib/authentication.py index 9400ebcab..c79024e73 100644 --- a/e2e/lib/authentication.py +++ b/e2e/lib/authentication.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from enum import Enum from time import time 
-from urllib.parse import urlparse, parse_qs +from urllib.parse import parse_qs, urlparse import jwt import requests diff --git a/e2e/lib/env.py b/e2e/lib/env.py index d9df56146..14f10c818 100644 --- a/e2e/lib/env.py +++ b/e2e/lib/env.py @@ -23,8 +23,7 @@ def get_apigee_env() -> ApigeeEnv: logging.error(f'the environment variable "APIGEE_ENVIRONMENT: {env}" is invalid') else: logging.warning( - 'the environment variable "APIGEE_ENVIRONMENT" is empty, ' - 'falling back to the default value: "internal-dev"' + 'the environment variable "APIGEE_ENVIRONMENT" is empty, falling back to the default value: "internal-dev"' ) return ApigeeEnv.INTERNAL_DEV diff --git a/e2e/test_authorization.py b/e2e/test_authorization.py index 1b112568f..4ad8192c9 100644 --- a/e2e/test_authorization.py +++ b/e2e/test_authorization.py @@ -11,7 +11,7 @@ from lib.env import get_auth_url, get_proxy_name, get_service_base_path from utils.authorization import Permission, app_full_access from utils.base_test import ImmunizationBaseTest -from utils.constants import valid_nhs_number1, cis2_user +from utils.constants import cis2_user, valid_nhs_number1 from utils.factories import make_app_restricted_app, make_cis2_app from utils.immunisation_api import ImmunisationApi from utils.mappings import VaccineTypes @@ -20,7 +20,6 @@ @unittest.skip("Skipping this entire test suite for now") class TestApplicationRestrictedAuthorization(ImmunizationBaseTest): - my_app: ApigeeApp my_imms_api: ImmunisationApi diff --git a/e2e/test_create_immunization.py b/e2e/test_create_immunization.py index 10d592b4f..dd2cc6cf8 100644 --- a/e2e/test_create_immunization.py +++ b/e2e/test_create_immunization.py @@ -3,7 +3,6 @@ class TestCreateImmunization(ImmunizationBaseTest): - def test_create_imms(self): """it should create a FHIR Immunization resource""" for imms_api in self.imms_apis: diff --git a/e2e/test_delete_immunization.py b/e2e/test_delete_immunization.py index ffcd79106..18ba5e369 100644 --- a/e2e/test_delete_immunization.py +++ b/e2e/test_delete_immunization.py @@ -4,7 +4,6 @@ class TestDeleteImmunization(ImmunizationBaseTest): - def test_delete_imms(self): """it should delete a FHIR Immunization resource""" for imms_api in self.imms_apis: diff --git a/e2e/test_delta_immunization.py b/e2e/test_delta_immunization.py index 87689a3a1..89f63bb86 100644 --- a/e2e/test_delta_immunization.py +++ b/e2e/test_delta_immunization.py @@ -9,7 +9,6 @@ class TestDeltaImmunization(ImmunizationBaseTest): - CREATE_OPERATION = "CREATE" UPDATE_OPERATION = "UPDATE" DELETE_OPERATION = "DELETE" diff --git a/e2e/test_deployment.py b/e2e/test_deployment.py index 88b03c469..3a3c759c4 100644 --- a/e2e/test_deployment.py +++ b/e2e/test_deployment.py @@ -2,11 +2,10 @@ from time import sleep import requests - from lib.env import ( get_service_base_path, - get_status_endpoint_api_key, get_source_commit_id, + get_status_endpoint_api_key, ) """Tests in this package don't really test anything. 
Platform created these tests to check if the current diff --git a/e2e/test_get_immunization.py b/e2e/test_get_immunization.py index 0d8e982c3..60e8b3460 100644 --- a/e2e/test_get_immunization.py +++ b/e2e/test_get_immunization.py @@ -4,11 +4,10 @@ from utils.base_test import ImmunizationBaseTest from utils.immunisation_api import parse_location from utils.mappings import EndpointOperationNames, VaccineTypes -from utils.resource import generate_imms_resource, generate_filtered_imms_resource +from utils.resource import generate_filtered_imms_resource, generate_imms_resource class TestGetImmunization(ImmunizationBaseTest): - def test_get_imms(self): """it should get a FHIR Immunization resource""" for imms_api in self.imms_apis: diff --git a/e2e/test_proxy.py b/e2e/test_proxy.py index ead3f24b0..2f2fdc962 100644 --- a/e2e/test_proxy.py +++ b/e2e/test_proxy.py @@ -4,13 +4,11 @@ import uuid import requests - from lib.env import get_service_base_path, get_status_endpoint_api_key from utils.immunisation_api import ImmunisationApi class TestProxyHealthcheck(unittest.TestCase): - proxy_url: str status_api_key: str @@ -80,8 +78,7 @@ def get_backend_url() -> str: return res.stdout except FileNotFoundError: raise RuntimeError( - "Make sure you install terraform. This test can only be run if you have access to the" - "backend deployment" + "Make sure you install terraform. This test can only be run if you have access to the backend deployment" ) except RuntimeError as e: raise RuntimeError(f"Failed to run command\n{e}") diff --git a/e2e/test_search_by_identifier_immunization.py b/e2e/test_search_by_identifier_immunization.py index fda427b3c..ac06b4c1e 100644 --- a/e2e/test_search_by_identifier_immunization.py +++ b/e2e/test_search_by_identifier_immunization.py @@ -1,17 +1,16 @@ import pprint import uuid from decimal import Decimal -from typing import NamedTuple, Literal, Optional +from typing import Literal, NamedTuple, Optional from lib.env import get_service_base_path from utils.base_test import ImmunizationBaseTest from utils.constants import valid_nhs_number1 from utils.mappings import VaccineTypes -from utils.resource import generate_imms_resource, generate_filtered_imms_resource +from utils.resource import generate_filtered_imms_resource, generate_imms_resource class TestSearchImmunizationByIdentifier(ImmunizationBaseTest): - def store_records(self, *resources): ids = [] for res in resources: @@ -68,7 +67,6 @@ def test_search_backwards_compatible(self): """ for imms_api in self.imms_apis: with self.subTest(imms_api): - stored_imms_resource = generate_imms_resource() imms_identifier_value = stored_imms_resource["identifier"][0]["value"] imms_id = self.store_records(stored_imms_resource) diff --git a/e2e/test_search_identifier_elements_immunization.py b/e2e/test_search_identifier_elements_immunization.py index da6c3d130..156eeb663 100644 --- a/e2e/test_search_identifier_elements_immunization.py +++ b/e2e/test_search_identifier_elements_immunization.py @@ -1,6 +1,6 @@ import pprint import uuid -from typing import NamedTuple, Literal, Optional +from typing import Literal, NamedTuple, Optional from lib.env import get_service_base_path from utils.base_test import ImmunizationBaseTest @@ -10,7 +10,6 @@ class TestSearchImmunizationByIdentifier(ImmunizationBaseTest): - def store_records(self, *resources): ids = [] for res in resources: diff --git a/e2e/test_search_immunization.py b/e2e/test_search_immunization.py index 12ae7bf76..e7486a1ce 100644 --- a/e2e/test_search_immunization.py +++ 
b/e2e/test_search_immunization.py @@ -1,18 +1,18 @@ import pprint import uuid from decimal import Decimal -from typing import NamedTuple, Literal, Optional, List +from typing import List, Literal, NamedTuple, Optional from lib.env import get_service_base_path from utils.base_test import ImmunizationBaseTest from utils.constants import ( valid_nhs_number1, valid_nhs_number2, - valid_patient_identifier2, valid_patient_identifier1, + valid_patient_identifier2, ) from utils.mappings import VaccineTypes -from utils.resource import generate_imms_resource, generate_filtered_imms_resource +from utils.resource import generate_filtered_imms_resource, generate_imms_resource class TestSearchImmunization(ImmunizationBaseTest): diff --git a/e2e/test_sqs_dlq.py b/e2e/test_sqs_dlq.py index 6a76ee67e..d92579e6c 100644 --- a/e2e/test_sqs_dlq.py +++ b/e2e/test_sqs_dlq.py @@ -4,13 +4,11 @@ import boto3 from botocore.exceptions import ClientError # Handle potential errors - from utils.delete_sqs_messages import read_and_delete_messages from utils.get_sqs_url import get_queue_url class TestSQS(unittest.TestCase): - def setUp(self): # Get SQS queue url self.queue_name = os.environ["AWS_SQS_QUEUE_NAME"] diff --git a/e2e/test_update_immunization.py b/e2e/test_update_immunization.py index 98c934e90..451184a0f 100644 --- a/e2e/test_update_immunization.py +++ b/e2e/test_update_immunization.py @@ -7,7 +7,6 @@ class TestUpdateImmunization(ImmunizationBaseTest): - def test_update_imms(self): """it should update a FHIR Immunization resource""" for imms_api in self.imms_apis: diff --git a/e2e/utils/base_test.py b/e2e/utils/base_test.py index 405db1654..9f648b393 100644 --- a/e2e/utils/base_test.py +++ b/e2e/utils/base_test.py @@ -2,10 +2,7 @@ import uuid from typing import List -from utils.constants import cis2_user -from utils.immunisation_api import ImmunisationApi - -from lib.apigee import ApigeeService, ApigeeApp, ApigeeProduct +from lib.apigee import ApigeeApp, ApigeeProduct, ApigeeService from lib.authentication import ( AppRestrictedAuthentication, Cis2Authentication, @@ -16,12 +13,14 @@ get_proxy_name, get_service_base_path, ) +from utils.constants import cis2_user from utils.factories import ( + make_apigee_product, make_apigee_service, make_app_restricted_app, make_cis2_app, - make_apigee_product, ) +from utils.immunisation_api import ImmunisationApi class ImmunizationBaseTest(unittest.TestCase): diff --git a/e2e/utils/factories.py b/e2e/utils/factories.py index a7b155ba7..ab3a86c34 100644 --- a/e2e/utils/factories.py +++ b/e2e/utils/factories.py @@ -2,18 +2,18 @@ import uuid from typing import Set -from lib.apigee import ApigeeService, ApigeeConfig, ApigeeApp, ApigeeProduct +from lib.apigee import ApigeeApp, ApigeeConfig, ApigeeProduct, ApigeeService from lib.authentication import ( - AppRestrictedCredentials, AppRestrictedAuthentication, + AppRestrictedCredentials, AuthType, UserRestrictedCredentials, ) from lib.env import ( get_apigee_access_token, - get_auth_url, - get_apigee_username, get_apigee_env, + get_apigee_username, + get_auth_url, get_default_app_restricted_credentials, get_proxy_name, ) diff --git a/e2e/utils/immunisation_api.py b/e2e/utils/immunisation_api.py index dda191e2a..ea2747907 100644 --- a/e2e/utils/immunisation_api.py +++ b/e2e/utils/immunisation_api.py @@ -3,12 +3,12 @@ import time import uuid from datetime import datetime -from typing import Optional, Literal, List +from typing import List, Literal, Optional import requests - from lib.authentication import BaseAuthentication -from 
utils.resource import generate_imms_resource, delete_imms_records +from utils.resource import delete_imms_records, generate_imms_resource + from .constants import patient_identifier_system @@ -85,10 +85,10 @@ def make_request_with_backoff( # Check if the response matches the expected status code to identify potential issues if response.status_code != expected_status_code: if response.status_code >= 500: - raise RuntimeError(f"Server error: {response.status_code} during " f"in {http_method} {url}") + raise RuntimeError(f"Server error: {response.status_code} during {http_method} {url}") else: raise ValueError( - f"Expected {expected_status_code} but got " f"{response.status_code} in {http_method} {url}" + f"Expected {expected_status_code} but got {response.status_code} in {http_method} {url}" ) return response @@ -220,7 +220,6 @@ def search_immunizations_full( body: Optional[str], expected_status_code: int = 200, ): - if http_method == "POST": url = f"{self.url}/Immunization/_search?{query_string}" else: diff --git a/e2e/utils/resource.py b/e2e/utils/resource.py index e974cf3e5..1b341ceba 100644 --- a/e2e/utils/resource.py +++ b/e2e/utils/resource.py @@ -3,14 +3,14 @@ import uuid from copy import deepcopy from decimal import Decimal -from typing import Union, Literal +from typing import Literal, Union import boto3 from botocore.config import Config from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource, Table from .constants import valid_nhs_number1 -from .mappings import vaccine_type_mappings, VaccineTypes +from .mappings import VaccineTypes, vaccine_type_mappings current_directory = os.path.dirname(os.path.realpath(__file__)) @@ -169,7 +169,4 @@ def delete_imms_records(identifiers: list[str]) -> None: failure_count = total # Assume all failed if batch writer fails if failure_count > 0: - print( - f"[teardown warning] Deleted {success_count} records out of {total}, " - f"failed to delete {failure_count}" - ) + print(f"[teardown warning] Deleted {success_count} records out of {total}, failed to delete {failure_count}") diff --git a/e2e_batch/clients.py b/e2e_batch/clients.py index 1cd05f050..3198df763 100644 --- a/e2e_batch/clients.py +++ b/e2e_batch/clients.py @@ -3,15 +3,16 @@ """ import logging + +from boto3 import client as boto3_client +from boto3 import resource as boto3_resource from constants import ( - environment, REGION, - batch_fifo_queue_name, ack_metadata_queue_name, audit_table_name, + batch_fifo_queue_name, + environment, ) -from boto3 import client as boto3_client, resource as boto3_resource - # AWS Clients and Resources diff --git a/e2e_batch/scenarios.py b/e2e_batch/scenarios.py index 436c30d67..50789049b 100644 --- a/e2e_batch/scenarios.py +++ b/e2e_batch/scenarios.py @@ -1,27 +1,28 @@ -import pandas as pd +import csv +import uuid from datetime import datetime, timezone -from vax_suppliers import TestPair, OdsVax + +import pandas as pd +from clients import logger from constants import ( + ACK_BUCKET, + RAVS_URI, ActionFlag, BusRowResult, DestinationType, Operation, - ACK_BUCKET, - RAVS_URI, OperationOutcome, ) +from errors import DynamoDBMismatchError from utils import ( - poll_s3_file_pattern, - fetch_pk_and_operation_from_dynamodb, - validate_fatal_error, - get_file_content_from_s3, aws_cleanup, create_row, + fetch_pk_and_operation_from_dynamodb, + get_file_content_from_s3, + poll_s3_file_pattern, + validate_fatal_error, ) -from clients import logger -from errors import DynamoDBMismatchError -import uuid -import csv +from vax_suppliers import 
OdsVax, TestPair class TestAction: @@ -111,14 +112,12 @@ def check_bus_file_content(self): if operation_outcome and "OPERATION_OUTCOME" in row: row_OPERATION_OUTCOME = row["OPERATION_OUTCOME"].strip() assert row_OPERATION_OUTCOME.startswith(operation_outcome), ( - f"{desc}.Row {i} expected OPERATION_OUTCOME '{operation_outcome}', " - f"but got '{row_OPERATION_OUTCOME}'" + f"{desc}.Row {i} expected OPERATION_OUTCOME '{operation_outcome}', but got '{row_OPERATION_OUTCOME}'" ) elif row_HEADER_RESPONSE_CODE == "Fatal Error": validate_fatal_error(desc, row, i, operation_outcome) def generate_csv_file(self): - self.file_name = self.get_file_name(self.vax, self.ods, self.version) logger.info(f'Test "{self.name}" File {self.file_name}') data = [] diff --git a/e2e_batch/test_e2e_batch.py b/e2e_batch/test_e2e_batch.py index b55b737ff..963e9d93a 100644 --- a/e2e_batch/test_e2e_batch.py +++ b/e2e_batch/test_e2e_batch.py @@ -1,30 +1,29 @@ import time import unittest -from utils import ( - upload_file_to_s3, - get_file_content_from_s3, - check_ack_file_content, - validate_row_count, - purge_sqs_queues, - delete_file_from_s3, -) from clients import logger +from constants import ( + ACK_BUCKET, + INPUT_PREFIX, + SOURCE_BUCKET, + TEMP_ACK_PREFIX, + DestinationType, + environment, +) from scenarios import ( - scenarios, TestCase, create_test_cases, enable_tests, generate_csv_files, + scenarios, ) - -from constants import ( - SOURCE_BUCKET, - INPUT_PREFIX, - ACK_BUCKET, - environment, - DestinationType, - TEMP_ACK_PREFIX, +from utils import ( + check_ack_file_content, + delete_file_from_s3, + get_file_content_from_s3, + purge_sqs_queues, + upload_file_to_s3, + validate_row_count, ) diff --git a/e2e_batch/utils.py b/e2e_batch/utils.py index d3a4347af..e057075a3 100644 --- a/e2e_batch/utils.py +++ b/e2e_batch/utils.py @@ -1,37 +1,38 @@ -import time import csv -import pandas as pd -import uuid -import json -import random import io +import json import os -from botocore.exceptions import ClientError -from boto3.dynamodb.conditions import Key -from io import StringIO +import random +import time +import uuid from datetime import datetime, timezone +from io import StringIO + +import pandas as pd +from boto3.dynamodb.conditions import Key +from botocore.exceptions import ClientError from clients import ( - logger, - s3_client, + ack_metadata_queue_url, audit_table, + batch_fifo_queue_url, events_table, + logger, + s3_client, sqs_client, - batch_fifo_queue_url, - ack_metadata_queue_url, ) -from errors import AckFileNotFoundError, DynamoDBMismatchError from constants import ( ACK_BUCKET, - FORWARDEDFILE_PREFIX, - SOURCE_BUCKET, - DUPLICATE, ACK_PREFIX, + DUPLICATE, FILE_NAME_VAL_ERROR, + FORWARDEDFILE_PREFIX, HEADER_RESPONSE_CODE_COLUMN, RAVS_URI, + SOURCE_BUCKET, ActionFlag, environment, ) +from errors import AckFileNotFoundError, DynamoDBMismatchError def upload_file_to_s3(file_name, bucket, prefix): @@ -313,9 +314,9 @@ def validate_row_count(desc, source_file_name, ack_file_name): """ source_file_row_count = fetch_row_count(SOURCE_BUCKET, f"archive/{source_file_name}") ack_file_row_count = fetch_row_count(ACK_BUCKET, ack_file_name) - assert ( - source_file_row_count == ack_file_row_count - ), f"{desc}. Row count mismatch: Input ({source_file_row_count}) vs Ack ({ack_file_row_count})" + assert source_file_row_count == ack_file_row_count, ( + f"{desc}. 
Row count mismatch: Input ({source_file_row_count}) vs Ack ({ack_file_row_count})" + ) def fetch_row_count(bucket, file_name): @@ -430,7 +431,6 @@ def verify_final_ack_file(file_key): def delete_filename_from_audit_table(filename) -> bool: - # 1. Query the GSI to get all items with the given filename try: response = audit_table.query( @@ -449,7 +449,6 @@ def delete_filename_from_audit_table(filename) -> bool: def delete_filename_from_events_table(identifier) -> bool: - # 1. Query the GSI to get all items with the given filename try: identifier_pk = f"{RAVS_URI}#{identifier}" diff --git a/filenameprocessor/src/audit_table.py b/filenameprocessor/src/audit_table.py index 84f8efe41..08bf7c068 100644 --- a/filenameprocessor/src/audit_table.py +++ b/filenameprocessor/src/audit_table.py @@ -1,9 +1,10 @@ """Add the filename to the audit table and check for duplicates.""" from typing import Optional + from clients import dynamodb_client, logger -from errors import UnhandledAuditTableError from constants import AUDIT_TABLE_NAME, AuditTableKeys +from errors import UnhandledAuditTableError def upsert_audit_table( diff --git a/filenameprocessor/src/clients.py b/filenameprocessor/src/clients.py index 7690d2921..3149d877a 100644 --- a/filenameprocessor/src/clients.py +++ b/filenameprocessor/src/clients.py @@ -3,10 +3,12 @@ lambda should be initialised ONCE ONLY (in this file) and then imported into the files where they are needed. """ -import os import logging +import os + import redis -from boto3 import client as boto3_client, resource as boto3_resource +from boto3 import client as boto3_client +from boto3 import resource as boto3_resource # AWS Clients and Resources. REGION_NAME = "eu-west-2" diff --git a/filenameprocessor/src/constants.py b/filenameprocessor/src/constants.py index fe1fddef4..9f675b0ac 100644 --- a/filenameprocessor/src/constants.py +++ b/filenameprocessor/src/constants.py @@ -4,10 +4,10 @@ from enum import StrEnum from errors import ( - VaccineTypePermissionsError, InvalidFileKeyError, UnhandledAuditTableError, UnhandledSqsError, + VaccineTypePermissionsError, ) SOURCE_BUCKET_NAME = os.getenv("SOURCE_BUCKET_NAME") diff --git a/filenameprocessor/src/elasticache.py b/filenameprocessor/src/elasticache.py index 7270d7a29..3dc8f9ee0 100644 --- a/filenameprocessor/src/elasticache.py +++ b/filenameprocessor/src/elasticache.py @@ -1,9 +1,10 @@ import json + from clients import redis_client from constants import ( - VACCINE_TYPE_TO_DISEASES_HASH_KEY, - SUPPLIER_PERMISSIONS_HASH_KEY, ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY, + SUPPLIER_PERMISSIONS_HASH_KEY, + VACCINE_TYPE_TO_DISEASES_HASH_KEY, ) diff --git a/filenameprocessor/src/file_name_processor.py b/filenameprocessor/src/file_name_processor.py index 6c3310639..15860372b 100644 --- a/filenameprocessor/src/file_name_processor.py +++ b/filenameprocessor/src/file_name_processor.py @@ -8,26 +8,27 @@ import argparse from uuid import uuid4 -from utils_for_filenameprocessor import get_creation_and_expiry_times, move_file -from file_validation import validate_file_key, is_file_in_directory_root -from send_sqs_message import make_and_send_sqs_message -from make_and_upload_ack_file import make_and_upload_the_ack_file + from audit_table import upsert_audit_table from clients import logger, s3_client -from logging_decorator import logging_decorator -from supplier_permissions import validate_vaccine_type_permissions +from constants import ( + ERROR_TYPE_TO_STATUS_CODE_MAP, + SOURCE_BUCKET_NAME, + FileNotProcessedReason, + FileStatus, +) from errors import ( 
- VaccineTypePermissionsError, InvalidFileKeyError, UnhandledAuditTableError, UnhandledSqsError, + VaccineTypePermissionsError, ) -from constants import ( - FileNotProcessedReason, - FileStatus, - ERROR_TYPE_TO_STATUS_CODE_MAP, - SOURCE_BUCKET_NAME, -) +from file_validation import is_file_in_directory_root, validate_file_key +from logging_decorator import logging_decorator +from make_and_upload_ack_file import make_and_upload_the_ack_file +from send_sqs_message import make_and_send_sqs_message +from supplier_permissions import validate_vaccine_type_permissions +from utils_for_filenameprocessor import get_creation_and_expiry_times, move_file # NOTE: logging_decorator is applied to handle_record function, rather than lambda_handler, because diff --git a/filenameprocessor/src/file_validation.py b/filenameprocessor/src/file_validation.py index 125393ec7..7440ad77b 100644 --- a/filenameprocessor/src/file_validation.py +++ b/filenameprocessor/src/file_validation.py @@ -1,11 +1,12 @@ """Functions for file key validation""" -from re import match from datetime import datetime +from re import match + from constants import VALID_VERSIONS from elasticache import ( - get_valid_vaccine_types_from_cache, get_supplier_system_from_cache, + get_valid_vaccine_types_from_cache, ) from errors import InvalidFileKeyError diff --git a/filenameprocessor/src/logging_decorator.py b/filenameprocessor/src/logging_decorator.py index c06f4f4c1..2cb281a25 100644 --- a/filenameprocessor/src/logging_decorator.py +++ b/filenameprocessor/src/logging_decorator.py @@ -5,6 +5,7 @@ import time from datetime import datetime from functools import wraps + from clients import firehose_client, logger STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose") diff --git a/filenameprocessor/src/make_and_upload_ack_file.py b/filenameprocessor/src/make_and_upload_ack_file.py index ccefba0ac..7d820adee 100644 --- a/filenameprocessor/src/make_and_upload_ack_file.py +++ b/filenameprocessor/src/make_and_upload_ack_file.py @@ -1,8 +1,9 @@ """Create ack file and upload to S3 bucket""" -from csv import writer import os -from io import StringIO, BytesIO +from csv import writer +from io import BytesIO, StringIO + from clients import s3_client diff --git a/filenameprocessor/src/send_sqs_message.py b/filenameprocessor/src/send_sqs_message.py index a5fb3c527..92746c0e1 100644 --- a/filenameprocessor/src/send_sqs_message.py +++ b/filenameprocessor/src/send_sqs_message.py @@ -2,7 +2,8 @@ import os from json import dumps as json_dumps -from clients import sqs_client, logger + +from clients import logger, sqs_client from errors import UnhandledSqsError diff --git a/filenameprocessor/src/supplier_permissions.py b/filenameprocessor/src/supplier_permissions.py index 8fbef2c75..79f8d8bb7 100644 --- a/filenameprocessor/src/supplier_permissions.py +++ b/filenameprocessor/src/supplier_permissions.py @@ -1,8 +1,8 @@ """Functions for fetching supplier permissions""" from clients import logger -from errors import VaccineTypePermissionsError from elasticache import get_supplier_permissions_from_cache +from errors import VaccineTypePermissionsError def validate_vaccine_type_permissions(vaccine_type: str, supplier: str) -> list: diff --git a/filenameprocessor/src/utils_for_filenameprocessor.py b/filenameprocessor/src/utils_for_filenameprocessor.py index d3ea2e1ff..d2a9324c9 100644 --- a/filenameprocessor/src/utils_for_filenameprocessor.py +++ b/filenameprocessor/src/utils_for_filenameprocessor.py @@ -1,7 +1,8 @@ """Utils 
for filenameprocessor lambda""" from datetime import timedelta -from clients import s3_client, logger + +from clients import logger, s3_client from constants import AUDIT_TABLE_TTL_DAYS diff --git a/filenameprocessor/tests/test_audit_table.py b/filenameprocessor/tests/test_audit_table.py index 0d958d2e0..3355d61f6 100644 --- a/filenameprocessor/tests/test_audit_table.py +++ b/filenameprocessor/tests/test_audit_table.py @@ -2,25 +2,26 @@ from unittest import TestCase from unittest.mock import patch + from boto3 import client as boto3_client from moto import mock_dynamodb -from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT from tests.utils_for_tests.generic_setup_and_teardown import ( GenericSetUp, GenericTearDown, ) -from tests.utils_for_tests.values_for_tests import MockFileDetails, FileDetails +from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( assert_audit_table_entry, ) +from tests.utils_for_tests.values_for_tests import FileDetails, MockFileDetails # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from constants import AUDIT_TABLE_NAME, FileStatus from audit_table import upsert_audit_table - from errors import UnhandledAuditTableError from clients import REGION_NAME + from constants import AUDIT_TABLE_NAME, FileStatus + from errors import UnhandledAuditTableError dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/test_elasticache.py b/filenameprocessor/tests/test_elasticache.py index 098ab9b7e..87f0437cd 100644 --- a/filenameprocessor/tests/test_elasticache.py +++ b/filenameprocessor/tests/test_elasticache.py @@ -3,24 +3,25 @@ import json from unittest import TestCase from unittest.mock import patch + from boto3 import client as boto3_client from moto import mock_s3 -from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT from tests.utils_for_tests.generic_setup_and_teardown import ( GenericSetUp, GenericTearDown, ) +from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT from tests.utils_for_tests.utils_for_filenameprocessor_tests import create_mock_hget # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): + from clients import REGION_NAME from elasticache import ( get_supplier_permissions_from_cache, - get_valid_vaccine_types_from_cache, get_supplier_system_from_cache, + get_valid_vaccine_types_from_cache, ) - from clients import REGION_NAME s3_client = boto3_client("s3", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/test_file_key_validation.py b/filenameprocessor/tests/test_file_key_validation.py index a7ca2bc6d..73560e39f 100644 --- a/filenameprocessor/tests/test_file_key_validation.py +++ b/filenameprocessor/tests/test_file_key_validation.py @@ -3,21 +3,21 @@ from unittest import TestCase from unittest.mock import patch -from tests.utils_for_tests.values_for_tests import MockFileDetails from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( MOCK_ODS_CODE_TO_SUPPLIER, create_mock_hget, ) +from tests.utils_for_tests.values_for_tests import MockFileDetails # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): 
+ from errors import InvalidFileKeyError from file_validation import ( is_file_in_directory_root, is_valid_datetime, validate_file_key, ) - from errors import InvalidFileKeyError VALID_FLU_EMIS_FILE_KEY = MockFileDetails.emis_flu.file_key VALID_RSV_RAVS_FILE_KEY = MockFileDetails.ravs_rsv_1.file_key diff --git a/filenameprocessor/tests/test_lambda_handler.py b/filenameprocessor/tests/test_lambda_handler.py index 1c754dd11..5642adede 100644 --- a/filenameprocessor/tests/test_lambda_handler.py +++ b/filenameprocessor/tests/test_lambda_handler.py @@ -2,41 +2,42 @@ import json import sys -from unittest.mock import patch, ANY -from unittest import TestCase -from json import loads as json_loads from contextlib import ExitStack from copy import deepcopy +from json import loads as json_loads +from unittest import TestCase +from unittest.mock import ANY, patch + import fakeredis from boto3 import client as boto3_client -from moto import mock_s3, mock_sqs, mock_firehose, mock_dynamodb +from moto import mock_dynamodb, mock_firehose, mock_s3, mock_sqs from tests.utils_for_tests.generic_setup_and_teardown import ( GenericSetUp, GenericTearDown, ) -from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( - assert_audit_table_entry, - create_mock_hget, - MOCK_ODS_CODE_TO_SUPPLIER, -) from tests.utils_for_tests.mock_environment_variables import ( MOCK_ENVIRONMENT_DICT, BucketNames, Sqs, ) +from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( + MOCK_ODS_CODE_TO_SUPPLIER, + assert_audit_table_entry, + create_mock_hget, +) from tests.utils_for_tests.values_for_tests import ( - MOCK_CREATED_AT_FORMATTED_STRING, - MockFileDetails, MOCK_BATCH_FILE_CONTENT, + MOCK_CREATED_AT_FORMATTED_STRING, MOCK_EXPIRES_AT, + MockFileDetails, ) # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from file_name_processor import lambda_handler, handle_record from clients import REGION_NAME - from constants import AUDIT_TABLE_NAME, FileStatus, AuditTableKeys + from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus + from file_name_processor import handle_record, lambda_handler s3_client = boto3_client("s3", region_name=REGION_NAME) sqs_client = boto3_client("sqs", region_name=REGION_NAME) @@ -443,7 +444,6 @@ def test_unexpected_bucket_name_and_filename_validation_fails(self): class TestMainEntryPoint(TestCase): - def test_run_local_constructs_event_and_calls_lambda_handler(self): test_args = [ "file_name_processor.py", diff --git a/filenameprocessor/tests/test_logging_decorator.py b/filenameprocessor/tests/test_logging_decorator.py index 0dc3285f6..f90f80168 100644 --- a/filenameprocessor/tests/test_logging_decorator.py +++ b/filenameprocessor/tests/test_logging_decorator.py @@ -1,12 +1,13 @@ """Tests for the logging_decorator and its helper functions""" -import unittest -from unittest.mock import patch import json +import unittest from contextlib import ExitStack +from unittest.mock import patch + from boto3 import client as boto3_client from botocore.exceptions import ClientError -from moto import mock_s3, mock_firehose, mock_sqs, mock_dynamodb +from moto import mock_dynamodb, mock_firehose, mock_s3, mock_sqs from tests.utils_for_tests.generic_setup_and_teardown import ( GenericSetUp, @@ -17,18 +18,18 @@ BucketNames, Firehose, ) +from tests.utils_for_tests.utils_for_filenameprocessor_tests import create_mock_hget from tests.utils_for_tests.values_for_tests import ( + MOCK_BATCH_FILE_CONTENT, MockFileDetails, 
fixed_datetime, - MOCK_BATCH_FILE_CONTENT, ) -from tests.utils_for_tests.utils_for_filenameprocessor_tests import create_mock_hget # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): from clients import REGION_NAME from file_name_processor import lambda_handler - from logging_decorator import send_log_to_firehose, generate_and_send_logs + from logging_decorator import generate_and_send_logs, send_log_to_firehose s3_client = boto3_client("s3", region_name=REGION_NAME) sqs_client = boto3_client("sqs", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/test_make_and_upload_ack_file.py b/filenameprocessor/tests/test_make_and_upload_ack_file.py index 0005ca3db..f1dadf587 100644 --- a/filenameprocessor/tests/test_make_and_upload_ack_file.py +++ b/filenameprocessor/tests/test_make_and_upload_ack_file.py @@ -1,28 +1,29 @@ """Tests for make_and_upload_ack_file functions""" +from copy import deepcopy from unittest import TestCase from unittest.mock import patch -from copy import deepcopy + from boto3 import client as boto3_client from moto import mock_s3 -from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( - get_csv_file_dict_reader, -) from tests.utils_for_tests.mock_environment_variables import ( MOCK_ENVIRONMENT_DICT, BucketNames, ) +from tests.utils_for_tests.utils_for_filenameprocessor_tests import ( + get_csv_file_dict_reader, +) from tests.utils_for_tests.values_for_tests import MockFileDetails # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): + from clients import REGION_NAME from make_and_upload_ack_file import ( + make_and_upload_the_ack_file, make_the_ack_data, upload_ack_file, - make_and_upload_the_ack_file, ) - from clients import REGION_NAME s3_client = boto3_client("s3", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/test_send_sqs_message.py b/filenameprocessor/tests/test_send_sqs_message.py index 054996e5c..a05048521 100644 --- a/filenameprocessor/tests/test_send_sqs_message.py +++ b/filenameprocessor/tests/test_send_sqs_message.py @@ -1,20 +1,21 @@ """Tests for send_sqs_message functions""" +from copy import deepcopy +from json import loads as json_loads from unittest import TestCase from unittest.mock import patch -from json import loads as json_loads -from copy import deepcopy -from moto import mock_sqs + from boto3 import client as boto3_client +from moto import mock_sqs from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT, Sqs from tests.utils_for_tests.values_for_tests import MockFileDetails # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from send_sqs_message import send_to_supplier_queue, make_and_send_sqs_message - from errors import UnhandledSqsError from clients import REGION_NAME + from errors import UnhandledSqsError + from send_sqs_message import make_and_send_sqs_message, send_to_supplier_queue sqs_client = boto3_client("sqs", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/test_supplier_permissions.py b/filenameprocessor/tests/test_supplier_permissions.py index d373a9223..5f1981077 100644 --- a/filenameprocessor/tests/test_supplier_permissions.py +++ b/filenameprocessor/tests/test_supplier_permissions.py @@ -7,8 +7,8 @@ # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - 
from supplier_permissions import validate_vaccine_type_permissions from errors import VaccineTypePermissionsError + from supplier_permissions import validate_vaccine_type_permissions class TestSupplierPermissions(TestCase): diff --git a/filenameprocessor/tests/test_utils_for_filenameprocessor.py b/filenameprocessor/tests/test_utils_for_filenameprocessor.py index 692f9eea9..449274235 100644 --- a/filenameprocessor/tests/test_utils_for_filenameprocessor.py +++ b/filenameprocessor/tests/test_utils_for_filenameprocessor.py @@ -1,24 +1,25 @@ """Tests for utils_for_filenameprocessor functions""" +from datetime import datetime, timedelta, timezone from unittest import TestCase from unittest.mock import patch -from datetime import datetime, timedelta, timezone -from moto import mock_s3 + from boto3 import client as boto3_client +from moto import mock_s3 -from tests.utils_for_tests.mock_environment_variables import ( - MOCK_ENVIRONMENT_DICT, - BucketNames, -) from tests.utils_for_tests.generic_setup_and_teardown import ( GenericSetUp, GenericTearDown, ) +from tests.utils_for_tests.mock_environment_variables import ( + MOCK_ENVIRONMENT_DICT, + BucketNames, +) # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from constants import AUDIT_TABLE_TTL_DAYS from clients import REGION_NAME + from constants import AUDIT_TABLE_TTL_DAYS from utils_for_filenameprocessor import get_creation_and_expiry_times, move_file s3_client = boto3_client("s3", region_name=REGION_NAME) diff --git a/filenameprocessor/tests/utils_for_tests/generic_setup_and_teardown.py b/filenameprocessor/tests/utils_for_tests/generic_setup_and_teardown.py index c7d79359d..27d87aa42 100644 --- a/filenameprocessor/tests/utils_for_tests/generic_setup_and_teardown.py +++ b/filenameprocessor/tests/utils_for_tests/generic_setup_and_teardown.py @@ -3,16 +3,16 @@ from unittest.mock import patch from tests.utils_for_tests.mock_environment_variables import ( - BucketNames, MOCK_ENVIRONMENT_DICT, - Sqs, + BucketNames, Firehose, + Sqs, ) # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): from clients import REGION_NAME - from constants import AuditTableKeys, AUDIT_TABLE_NAME + from constants import AUDIT_TABLE_NAME, AuditTableKeys class GenericSetUp: @@ -32,7 +32,6 @@ def __init__( sqs_client=None, dynamodb_client=None, ): - if s3_client: for bucket_name in [ BucketNames.SOURCE, @@ -78,7 +77,6 @@ def __init__( sqs_client=None, dynamodb_client=None, ): - if s3_client: for bucket_name in [ BucketNames.SOURCE, diff --git a/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py b/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py index c7956735e..440046aa2 100644 --- a/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py +++ b/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py @@ -1,22 +1,23 @@ """Utils functions for filenameprocessor tests""" -from unittest.mock import patch from io import StringIO -from boto3 import client as boto3_client +from unittest.mock import patch -from tests.utils_for_tests.values_for_tests import FileDetails, MockFileDetails +from boto3 import client as boto3_client from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT +from tests.utils_for_tests.values_for_tests import FileDetails, MockFileDetails # Ensure environment variables are mocked before 
importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): - from clients import REGION_NAME from csv import DictReader + + from clients import REGION_NAME from constants import ( - AuditTableKeys, AUDIT_TABLE_NAME, - FileStatus, - SUPPLIER_PERMISSIONS_HASH_KEY, ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY, + SUPPLIER_PERMISSIONS_HASH_KEY, + AuditTableKeys, + FileStatus, ) MOCK_ODS_CODE_TO_SUPPLIER = {"YGM41": "EMIS", "X8E5B": "RAVS"} diff --git a/filenameprocessor/tests/utils_for_tests/values_for_tests.py b/filenameprocessor/tests/utils_for_tests/values_for_tests.py index 8d48d4b9a..017d013d6 100644 --- a/filenameprocessor/tests/utils_for_tests/values_for_tests.py +++ b/filenameprocessor/tests/utils_for_tests/values_for_tests.py @@ -1,7 +1,7 @@ """File of values which can be used for testing""" -from unittest.mock import patch from datetime import datetime +from unittest.mock import patch from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT diff --git a/lambdas/ack_backend/src/ack_processor.py b/lambdas/ack_backend/src/ack_processor.py index 0c44ef6e1..d632c0ad8 100644 --- a/lambdas/ack_backend/src/ack_processor.py +++ b/lambdas/ack_backend/src/ack_processor.py @@ -1,10 +1,11 @@ """Ack lambda handler""" import json + +from convert_message_to_ack_row import convert_message_to_ack_row from logging_decorators import ack_lambda_handler_logging_decorator -from update_ack_file import update_ack_file, complete_batch_file_process +from update_ack_file import complete_batch_file_process, update_ack_file from utils_for_ack_lambda import is_ack_processing_complete -from convert_message_to_ack_row import convert_message_to_ack_row @ack_lambda_handler_logging_decorator @@ -26,7 +27,6 @@ def lambda_handler(event, _): total_ack_rows_processed = 0 for i, record in enumerate(event["Records"]): - try: incoming_message_body = json.loads(record["body"]) except Exception as body_json_error: diff --git a/lambdas/ack_backend/src/audit_table.py b/lambdas/ack_backend/src/audit_table.py index b21ab08d7..4399c43a7 100644 --- a/lambdas/ack_backend/src/audit_table.py +++ b/lambdas/ack_backend/src/audit_table.py @@ -1,9 +1,10 @@ """Add the filename to the audit table and check for duplicates.""" from typing import Optional + from common.clients import dynamodb_client, logger from common.models.errors import UnhandledAuditTableError -from constants import AUDIT_TABLE_NAME, FileStatus, AuditTableKeys +from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus def change_audit_table_status_to_processed(file_key: str, message_id: str) -> None: diff --git a/lambdas/ack_backend/src/logging_decorators.py b/lambdas/ack_backend/src/logging_decorators.py index e78cb7671..2317d17fb 100644 --- a/lambdas/ack_backend/src/logging_decorators.py +++ b/lambdas/ack_backend/src/logging_decorators.py @@ -4,6 +4,7 @@ import time from datetime import datetime from functools import wraps + from common.log_decorator import generate_and_send_logs PREFIX = "ack_processor" @@ -73,7 +74,6 @@ def complete_batch_file_process_logging_decorator(func): @wraps(func) def wrapper(*args, **kwargs): - base_log_data = { "function_name": f"{PREFIX}_{func.__name__}", "date_time": str(datetime.now()), @@ -101,7 +101,6 @@ def ack_lambda_handler_logging_decorator(func): @wraps(func) def wrapper(event, context, *args, **kwargs): - base_log_data = { "function_name": f"{PREFIX}_{func.__name__}", "date_time": str(datetime.now()), diff --git a/lambdas/ack_backend/src/update_ack_file.py 
b/lambdas/ack_backend/src/update_ack_file.py index d851cb416..f1b70d866 100644 --- a/lambdas/ack_backend/src/update_ack_file.py +++ b/lambdas/ack_backend/src/update_ack_file.py @@ -1,17 +1,18 @@ """Functions for uploading the data to the ack file""" -from botocore.exceptions import ClientError -from io import StringIO, BytesIO +from io import BytesIO, StringIO + from audit_table import change_audit_table_status_to_processed +from botocore.exceptions import ClientError from common.clients import get_s3_client, logger from constants import ( ACK_HEADERS, - get_source_bucket_name, - get_ack_bucket_name, + BATCH_FILE_ARCHIVE_DIR, + BATCH_FILE_PROCESSING_DIR, COMPLETED_ACK_DIR, TEMP_ACK_DIR, - BATCH_FILE_PROCESSING_DIR, - BATCH_FILE_ARCHIVE_DIR, + get_ack_bucket_name, + get_source_bucket_name, ) from logging_decorators import complete_batch_file_process_logging_decorator diff --git a/lambdas/ack_backend/tests/test_ack_processor.py b/lambdas/ack_backend/tests/test_ack_processor.py index ebaffe885..21cf93939 100644 --- a/lambdas/ack_backend/tests/test_ack_processor.py +++ b/lambdas/ack_backend/tests/test_ack_processor.py @@ -1,33 +1,33 @@ """Tests for the ack processor lambda handler.""" -import unittest -import os import json -from unittest.mock import patch, Mock +import os +import unittest from io import StringIO +from unittest.mock import Mock, patch + from boto3 import client as boto3_client from moto import mock_aws - +from utils.generic_setup_and_teardown_for_ack_backend import ( + GenericSetUp, + GenericTearDown, +) from utils.mock_environment_variables import ( AUDIT_TABLE_NAME, MOCK_ENVIRONMENT_DICT, - BucketNames, REGION_NAME, -) -from utils.generic_setup_and_teardown_for_ack_backend import ( - GenericSetUp, - GenericTearDown, + BucketNames, ) from utils.utils_for_ack_backend_tests import ( add_audit_entry_to_table, - validate_ack_file_content, generate_sample_existing_ack_content, + validate_ack_file_content, ) from utils.values_for_ack_backend_tests import ( - DiagnosticsDictionaries, + EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS, MOCK_MESSAGE_DETAILS, + DiagnosticsDictionaries, ValidValues, - EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS, ) from utils_for_ack_lambda import _BATCH_EVENT_ID_TO_RECORD_COUNT_MAP @@ -184,7 +184,7 @@ def test_lambda_handler_main(self): test_cases = [ { "description": "Multiple messages: all successful", - "messages": [{"row_id": f"row^{i+1}"} for i in range(10)], + "messages": [{"row_id": f"row^{i + 1}"} for i in range(10)], }, { "description": "Multiple messages: all with diagnostics (failure messages)", diff --git a/lambdas/ack_backend/tests/test_audit_table.py b/lambdas/ack_backend/tests/test_audit_table.py index 2f362bf61..e61ab14e7 100644 --- a/lambdas/ack_backend/tests/test_audit_table.py +++ b/lambdas/ack_backend/tests/test_audit_table.py @@ -1,11 +1,11 @@ import unittest from unittest.mock import patch + import audit_table from common.models.errors import UnhandledAuditTableError class TestAuditTable(unittest.TestCase): - def setUp(self): self.logger_patcher = patch("audit_table.logger") self.mock_logger = self.logger_patcher.start() diff --git a/lambdas/ack_backend/tests/test_convert_message_to_ack_row.py b/lambdas/ack_backend/tests/test_convert_message_to_ack_row.py index 143b9f482..bdf43b499 100644 --- a/lambdas/ack_backend/tests/test_convert_message_to_ack_row.py +++ b/lambdas/ack_backend/tests/test_convert_message_to_ack_row.py @@ -6,8 +6,8 @@ from tests.utils.mock_environment_variables import MOCK_ENVIRONMENT_DICT from 
tests.utils.values_for_ack_backend_tests import ( DefaultValues, - ValidValues, DiagnosticsDictionaries, + ValidValues, ) with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): diff --git a/lambdas/ack_backend/tests/test_logging_decorators.py b/lambdas/ack_backend/tests/test_logging_decorators.py index f9b85d85f..d410c4af8 100644 --- a/lambdas/ack_backend/tests/test_logging_decorators.py +++ b/lambdas/ack_backend/tests/test_logging_decorators.py @@ -1,5 +1,6 @@ import unittest from unittest.mock import patch + import logging_decorators diff --git a/lambdas/ack_backend/tests/test_splunk_logging.py b/lambdas/ack_backend/tests/test_splunk_logging.py index fac195547..aabc249b4 100644 --- a/lambdas/ack_backend/tests/test_splunk_logging.py +++ b/lambdas/ack_backend/tests/test_splunk_logging.py @@ -1,25 +1,26 @@ """Tests for ack lambda logging decorators""" -import unittest -from unittest.mock import patch, call import json -from io import StringIO +import unittest from contextlib import ExitStack -from moto import mock_aws +from io import StringIO +from unittest.mock import call, patch + from boto3 import client as boto3_client +from moto import mock_aws -from tests.utils.values_for_ack_backend_tests import ( - ValidValues, - InvalidValues, - DiagnosticsDictionaries, - EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS, -) -from tests.utils.mock_environment_variables import MOCK_ENVIRONMENT_DICT, BucketNames from tests.utils.generic_setup_and_teardown_for_ack_backend import ( GenericSetUp, GenericTearDown, ) +from tests.utils.mock_environment_variables import MOCK_ENVIRONMENT_DICT, BucketNames from tests.utils.utils_for_ack_backend_tests import generate_event +from tests.utils.values_for_ack_backend_tests import ( + EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS, + DiagnosticsDictionaries, + InvalidValues, + ValidValues, +) with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): from ack_processor import lambda_handler diff --git a/lambdas/ack_backend/tests/test_update_ack_file.py b/lambdas/ack_backend/tests/test_update_ack_file.py index 8cea1201e..398947952 100644 --- a/lambdas/ack_backend/tests/test_update_ack_file.py +++ b/lambdas/ack_backend/tests/test_update_ack_file.py @@ -1,36 +1,35 @@ """Tests for the functions in the update_ack_file module.""" -import unittest import os +import unittest +from io import StringIO +from unittest.mock import patch + from boto3 import client as boto3_client from moto import mock_aws - -from utils.values_for_ack_backend_tests import ValidValues, DefaultValues -from utils.mock_environment_variables import ( - MOCK_ENVIRONMENT_DICT, - BucketNames, - REGION_NAME, -) from utils.generic_setup_and_teardown_for_ack_backend import ( GenericSetUp, GenericTearDown, ) +from utils.mock_environment_variables import ( + MOCK_ENVIRONMENT_DICT, + REGION_NAME, + BucketNames, +) from utils.utils_for_ack_backend_tests import ( - setup_existing_ack_file, - obtain_current_ack_file_content, + MOCK_MESSAGE_DETAILS, + generate_expected_ack_content, generate_expected_ack_file_row, generate_sample_existing_ack_content, - generate_expected_ack_content, - MOCK_MESSAGE_DETAILS, + obtain_current_ack_file_content, + setup_existing_ack_file, ) - -from unittest.mock import patch -from io import StringIO +from utils.values_for_ack_backend_tests import DefaultValues, ValidValues with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): from update_ack_file import ( - obtain_current_ack_content, create_ack_data, + obtain_current_ack_content, update_ack_file, ) diff --git 
a/lambdas/ack_backend/tests/test_update_ack_file_flow.py b/lambdas/ack_backend/tests/test_update_ack_file_flow.py index 73b3ba273..a613436ab 100644 --- a/lambdas/ack_backend/tests/test_update_ack_file_flow.py +++ b/lambdas/ack_backend/tests/test_update_ack_file_flow.py @@ -1,8 +1,8 @@ -from unittest.mock import patch -import update_ack_file import unittest -import boto3 +from unittest.mock import patch +import boto3 +import update_ack_file from moto import mock_aws diff --git a/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py b/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py index e35c8d863..164bd7cd0 100644 --- a/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py +++ b/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py @@ -1,9 +1,9 @@ """Generic setup and teardown for ACK backend tests""" -from tests.utils.mock_environment_variables import AUDIT_TABLE_NAME, BucketNames, Firehose, REGION_NAME - from constants import AuditTableKeys +from tests.utils.mock_environment_variables import AUDIT_TABLE_NAME, REGION_NAME, BucketNames, Firehose + class GenericSetUp: """ @@ -14,7 +14,6 @@ class GenericSetUp: """ def __init__(self, s3_client=None, firehose_client=None, dynamodb_client=None): - if s3_client: for bucket_name in [ BucketNames.SOURCE, @@ -50,7 +49,6 @@ class GenericTearDown: """Performs generic tear down of mock resources""" def __init__(self, s3_client=None, firehose_client=None, dynamodb_client=None): - if s3_client: for bucket_name in [ BucketNames.SOURCE, diff --git a/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py b/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py index 5d8af01cd..e18d4463e 100644 --- a/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py +++ b/lambdas/ack_backend/tests/utils/utils_for_ack_backend_tests.py @@ -2,9 +2,11 @@ import json from typing import Optional + from boto3 import client as boto3_client -from tests.utils.values_for_ack_backend_tests import ValidValues, MOCK_MESSAGE_DETAILS + from tests.utils.mock_environment_variables import AUDIT_TABLE_NAME, REGION_NAME, BucketNames +from tests.utils.values_for_ack_backend_tests import MOCK_MESSAGE_DETAILS, ValidValues firehose_client = boto3_client("firehose", region_name=REGION_NAME) diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py index a79ca20a4..070b927f7 100644 --- a/lambdas/id_sync/src/id_sync.py +++ b/lambdas/id_sync/src/id_sync.py @@ -6,8 +6,9 @@ """ from typing import Any, Dict + from common.aws_lambda_event import AwsLambdaEvent -from common.clients import logger, STREAM_NAME +from common.clients import STREAM_NAME, logger from common.log_decorator import logging_decorator from exceptions.id_sync_exception import IdSyncException from record_processor import process_record diff --git a/lambdas/id_sync/src/ieds_db_operations.py b/lambdas/id_sync/src/ieds_db_operations.py index 2ecac4a7a..fd88a25cb 100644 --- a/lambdas/id_sync/src/ieds_db_operations.py +++ b/lambdas/id_sync/src/ieds_db_operations.py @@ -1,10 +1,11 @@ +import json + from boto3.dynamodb.conditions import Key -from os_vars import get_ieds_table_name from common.aws_dynamodb import get_dynamodb_table -from common.clients import logger, dynamodb_client -import json -from utils import make_status +from common.clients import dynamodb_client, logger from exceptions.id_sync_exception import IdSyncException +from os_vars import get_ieds_table_name +from utils import 
make_status ieds_table = None BATCH_SIZE = 25 # DynamoDB TransactWriteItems max batch size diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index 7f6271aa5..d38ece5f9 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -3,12 +3,13 @@ """ import tempfile -from os_vars import get_pds_env + from common.authentication import AppRestrictedAuth, Service from common.cache import Cache from common.clients import logger, secrets_manager_client from common.pds_service import PdsService from exceptions.id_sync_exception import IdSyncException +from os_vars import get_pds_env pds_env = get_pds_env() safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") # NOSONAR diff --git a/lambdas/id_sync/src/record_processor.py b/lambdas/id_sync/src/record_processor.py index 2918d9b18..6d42d93e8 100644 --- a/lambdas/id_sync/src/record_processor.py +++ b/lambdas/id_sync/src/record_processor.py @@ -1,18 +1,18 @@ +import ast +import json +from typing import Any, Dict + from common.clients import logger -from typing import Dict, Any -from pds_details import pds_get_patient_id, pds_get_patient_details from ieds_db_operations import ( - ieds_update_patient_id, extract_patient_resource_from_item, get_items_from_patient_id, + ieds_update_patient_id, ) +from pds_details import pds_get_patient_details, pds_get_patient_id from utils import make_status -import json -import ast def process_record(event_record: Dict[str, Any]) -> Dict[str, Any]: - logger.info("process_record. Processing record: %s", event_record) body_text = event_record.get("body", "") diff --git a/lambdas/id_sync/src/utils.py b/lambdas/id_sync/src/utils.py index ea9dcdef4..920ad7b84 100644 --- a/lambdas/id_sync/src/utils.py +++ b/lambdas/id_sync/src/utils.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Any, Dict def make_status(msg: str, nhs_number: str | None = None, status: str = "success") -> Dict[str, Any]: diff --git a/lambdas/id_sync/tests/test_id_sync.py b/lambdas/id_sync/tests/test_id_sync.py index 81ca598a1..f86b242c9 100644 --- a/lambdas/id_sync/tests/test_id_sync.py +++ b/lambdas/id_sync/tests/test_id_sync.py @@ -1,15 +1,13 @@ import unittest -from unittest.mock import patch, MagicMock - +from unittest.mock import MagicMock, patch with patch("common.log_decorator.logging_decorator") as mock_decorator: mock_decorator.return_value = lambda f: f # Pass-through decorator - from id_sync import handler from exceptions.id_sync_exception import IdSyncException + from id_sync import handler class TestIdSyncHandler(unittest.TestCase): - def setUp(self): """Set up all patches and test fixtures""" # Patch all dependencies diff --git a/lambdas/id_sync/tests/test_ieds_db_operations.py b/lambdas/id_sync/tests/test_ieds_db_operations.py index 2138a7954..4a67d41a3 100644 --- a/lambdas/id_sync/tests/test_ieds_db_operations.py +++ b/lambdas/id_sync/tests/test_ieds_db_operations.py @@ -1,13 +1,12 @@ import unittest +from unittest.mock import MagicMock, patch -from ieds_db_operations import extract_patient_resource_from_item -from unittest.mock import patch, MagicMock -from exceptions.id_sync_exception import IdSyncException import ieds_db_operations +from exceptions.id_sync_exception import IdSyncException +from ieds_db_operations import extract_patient_resource_from_item class TestExtractPatientResourceFromItem(unittest.TestCase): - def test_extract_from_dict_with_contained_patient(self): item = { "Resource": { @@ -67,7 +66,6 @@ def tearDown(self): class 
TestGetIedsTable(TestIedsDbOperations): - def setUp(self): """Set up test fixtures""" super().setUp() @@ -302,7 +300,6 @@ def test_get_ieds_table_exception_handling(self): class TestUpdatePatientIdInIEDS(TestIedsDbOperations): - def setUp(self): super().setUp() # Mock get_ieds_table() and subsequent calls @@ -541,7 +538,6 @@ def test_ieds_update_patient_id_special_characters(self): class TestGetItemsToUpdate(TestIedsDbOperations): - def setUp(self): super().setUp() # Mock get_ieds_table() diff --git a/lambdas/id_sync/tests/test_os_vars.py b/lambdas/id_sync/tests/test_os_vars.py index 8796b3044..f06d7fb47 100644 --- a/lambdas/id_sync/tests/test_os_vars.py +++ b/lambdas/id_sync/tests/test_os_vars.py @@ -1,12 +1,11 @@ -import unittest import os +import unittest from unittest.mock import patch import os_vars class TestOsVars(unittest.TestCase): - def setUp(self): os_vars._ieds_table_name = None os_vars._delta_table_name = None diff --git a/lambdas/id_sync/tests/test_pds_details.py b/lambdas/id_sync/tests/test_pds_details.py index f490e9b26..a5f159588 100644 --- a/lambdas/id_sync/tests/test_pds_details.py +++ b/lambdas/id_sync/tests/test_pds_details.py @@ -1,11 +1,11 @@ import unittest -from unittest.mock import patch, MagicMock -from pds_details import pds_get_patient_details, pds_get_patient_id +from unittest.mock import MagicMock, patch + from exceptions.id_sync_exception import IdSyncException +from pds_details import pds_get_patient_details, pds_get_patient_id class TestGetPdsPatientDetails(unittest.TestCase): - def setUp(self): """Set up test fixtures and mocks""" self.test_patient_id = "9912003888" diff --git a/lambdas/id_sync/tests/test_record_processor.py b/lambdas/id_sync/tests/test_record_processor.py index 19e6e0715..d0e24daa0 100644 --- a/lambdas/id_sync/tests/test_record_processor.py +++ b/lambdas/id_sync/tests/test_record_processor.py @@ -1,10 +1,10 @@ import unittest from unittest.mock import patch + from record_processor import process_record class TestRecordProcessor(unittest.TestCase): - def setUp(self): """Set up test fixtures and mocks""" # Patch logger diff --git a/lambdas/mns_subscription/src/mns_service.py b/lambdas/mns_subscription/src/mns_service.py index cf8e4d7d5..7a6eee6bf 100644 --- a/lambdas/mns_subscription/src/mns_service.py +++ b/lambdas/mns_subscription/src/mns_service.py @@ -1,17 +1,18 @@ -import requests +import json +import logging import os import uuid -import logging -import json + +import requests from common.authentication import AppRestrictedAuth from common.models.errors import ( - UnhandledResponseError, + BadRequestError, + ConflictError, ResourceNotFoundError, - UnauthorizedError, ServerError, - BadRequestError, TokenValidationError, - ConflictError, + UnauthorizedError, + UnhandledResponseError, ) SQS_ARN = os.getenv("SQS_ARN") @@ -48,7 +49,6 @@ def __init__(self, authenticator: AppRestrictedAuth): logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") def subscribe_notification(self) -> dict | None: - response = requests.post( MNS_URL, headers=self.request_headers, diff --git a/lambdas/mns_subscription/src/mns_setup.py b/lambdas/mns_subscription/src/mns_setup.py index 11012096a..ea39e3642 100644 --- a/lambdas/mns_subscription/src/mns_setup.py +++ b/lambdas/mns_subscription/src/mns_setup.py @@ -1,5 +1,6 @@ -import boto3 import logging + +import boto3 from botocore.config import Config from common.authentication import AppRestrictedAuth, Service from common.cache import Cache diff --git a/lambdas/mns_subscription/src/subscribe_mns.py 
b/lambdas/mns_subscription/src/subscribe_mns.py index 64340769a..78539d044 100644 --- a/lambdas/mns_subscription/src/subscribe_mns.py +++ b/lambdas/mns_subscription/src/subscribe_mns.py @@ -1,4 +1,5 @@ import logging + from mns_setup import get_mns_service diff --git a/lambdas/mns_subscription/src/unsubscribe_mns.py b/lambdas/mns_subscription/src/unsubscribe_mns.py index 789a0c5c0..b934ccff3 100644 --- a/lambdas/mns_subscription/src/unsubscribe_mns.py +++ b/lambdas/mns_subscription/src/unsubscribe_mns.py @@ -1,4 +1,5 @@ import logging + from mns_setup import get_mns_service diff --git a/lambdas/mns_subscription/tests/test_mns_service.py b/lambdas/mns_subscription/tests/test_mns_service.py index f90d5375d..a9fe6d998 100644 --- a/lambdas/mns_subscription/tests/test_mns_service.py +++ b/lambdas/mns_subscription/tests/test_mns_service.py @@ -1,17 +1,17 @@ -import unittest import os -from unittest.mock import patch, MagicMock, Mock, create_autospec -from mns_service import MnsService, MNS_URL +import unittest +from unittest.mock import MagicMock, Mock, create_autospec, patch + from common.authentication import AppRestrictedAuth from common.models.errors import ( + BadRequestError, + ResourceNotFoundError, ServerError, - UnhandledResponseError, TokenValidationError, - BadRequestError, UnauthorizedError, - ResourceNotFoundError, + UnhandledResponseError, ) - +from mns_service import MNS_URL, MnsService SQS_ARN = "arn:aws:sqs:eu-west-2:123456789012:my-queue" @@ -29,7 +29,6 @@ def setUp(self): @patch("mns_service.requests.post") @patch("mns_service.requests.get") def test_successful_subscription(self, mock_get, mock_post): - # Arrange GET to return no subscription found mock_get_response = MagicMock() mock_get_response.status_code = 200 diff --git a/lambdas/mns_subscription/tests/test_mns_setup.py b/lambdas/mns_subscription/tests/test_mns_setup.py index 9616d5232..53aa67941 100644 --- a/lambdas/mns_subscription/tests/test_mns_setup.py +++ b/lambdas/mns_subscription/tests/test_mns_setup.py @@ -1,10 +1,10 @@ import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch + from mns_setup import get_mns_service class TestGetMnsService(unittest.TestCase): - @patch("mns_setup.boto3.client") @patch("mns_setup.AppRestrictedAuth") @patch("mns_setup.MnsService") diff --git a/lambdas/mns_subscription/tests/test_subscribe_mns.py b/lambdas/mns_subscription/tests/test_subscribe_mns.py index ea3dcad41..f62c56560 100644 --- a/lambdas/mns_subscription/tests/test_subscribe_mns.py +++ b/lambdas/mns_subscription/tests/test_subscribe_mns.py @@ -1,10 +1,10 @@ import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch + from subscribe_mns import run_subscription class TestRunSubscription(unittest.TestCase): - @patch("subscribe_mns.get_mns_service") # patch where it's imported/used! 
def test_run_subscription_success(self, mock_get_mns_service): mock_mns_instance = MagicMock() diff --git a/lambdas/mns_subscription/tests/test_unsubscribe_mns.py b/lambdas/mns_subscription/tests/test_unsubscribe_mns.py index 344ffb77b..aa568d998 100644 --- a/lambdas/mns_subscription/tests/test_unsubscribe_mns.py +++ b/lambdas/mns_subscription/tests/test_unsubscribe_mns.py @@ -1,10 +1,10 @@ import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch + from unsubscribe_mns import run_unsubscribe class TestRunUnsubscribe(unittest.TestCase): - @patch("unsubscribe_mns.get_mns_service") def test_run_unsubscribe_success(self, mock_get_mns_service): # Arrange diff --git a/lambdas/redis_sync/src/record_processor.py b/lambdas/redis_sync/src/record_processor.py index e5d732eb2..0daff29f1 100644 --- a/lambdas/redis_sync/src/record_processor.py +++ b/lambdas/redis_sync/src/record_processor.py @@ -1,6 +1,6 @@ -from redis_cacher import RedisCacher from common.clients import logger from common.s3_event import S3EventRecord +from redis_cacher import RedisCacher """ Record Processor diff --git a/lambdas/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py index aba419e79..45d0a73a3 100644 --- a/lambdas/redis_sync/src/redis_cacher.py +++ b/lambdas/redis_sync/src/redis_cacher.py @@ -1,10 +1,11 @@ "Upload the content from a config file in S3 to ElastiCache (Redis)" import json -from transform_map import transform_map + from common.clients import logger from common.redis_client import get_redis_client from common.s3_reader import S3Reader +from transform_map import transform_map class RedisCacher: diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py index fc2a11da1..710fc144b 100644 --- a/lambdas/redis_sync/src/redis_sync.py +++ b/lambdas/redis_sync/src/redis_sync.py @@ -1,9 +1,9 @@ -from event_read import read_event -from record_processor import process_record from common.clients import STREAM_NAME, logger from common.log_decorator import logging_decorator from common.redis_client import get_redis_client from common.s3_event import S3Event +from event_read import read_event +from record_processor import process_record """ Event Processor @@ -38,7 +38,6 @@ def _process_all_records(s3_records: list) -> dict: @logging_decorator(prefix="redis_sync", stream_name=STREAM_NAME) def handler(event, _): - try: no_records = "No records found in event" # check if the event requires a read, ie {"read": "my-hashmap"} diff --git a/lambdas/redis_sync/src/transform_map.py b/lambdas/redis_sync/src/transform_map.py index 578ae0cf1..b3ee88f8e 100644 --- a/lambdas/redis_sync/src/transform_map.py +++ b/lambdas/redis_sync/src/transform_map.py @@ -1,10 +1,10 @@ +from common.clients import logger from constants import RedisCacheKey from transform_configs import ( - transform_vaccine_map, transform_supplier_permissions, + transform_vaccine_map, transform_validation_rules, ) -from common.clients import logger """ Transform config file to format required in REDIS cache. 
diff --git a/lambdas/redis_sync/tests/test_event_read.py b/lambdas/redis_sync/tests/test_event_read.py index 0537471c6..db48d0664 100644 --- a/lambdas/redis_sync/tests/test_event_read.py +++ b/lambdas/redis_sync/tests/test_event_read.py @@ -1,5 +1,6 @@ import unittest from unittest.mock import Mock + from event_read import read_event diff --git a/lambdas/redis_sync/tests/test_handler.py b/lambdas/redis_sync/tests/test_handler.py index 31b995eae..eb2e10e26 100644 --- a/lambdas/redis_sync/tests/test_handler.py +++ b/lambdas/redis_sync/tests/test_handler.py @@ -1,10 +1,11 @@ """unit tests for redis_sync.py""" -import unittest import importlib +import unittest from unittest.mock import patch -from constants import RedisCacheKey + import redis_sync +from constants import RedisCacheKey class TestHandler(unittest.TestCase): diff --git a/lambdas/redis_sync/tests/test_handler_decorator.py b/lambdas/redis_sync/tests/test_handler_decorator.py index ef2484ac2..a61882b45 100644 --- a/lambdas/redis_sync/tests/test_handler_decorator.py +++ b/lambdas/redis_sync/tests/test_handler_decorator.py @@ -1,11 +1,12 @@ """unit tests for redis_sync.py""" -import unittest import json +import unittest from unittest.mock import patch -from redis_sync import handler -from constants import RedisCacheKey + from common.s3_event import S3EventRecord +from constants import RedisCacheKey +from redis_sync import handler class TestHandlerDecorator(unittest.TestCase): diff --git a/lambdas/redis_sync/tests/test_record_processor.py b/lambdas/redis_sync/tests/test_record_processor.py index 6a3c6f339..43a64fad8 100644 --- a/lambdas/redis_sync/tests/test_record_processor.py +++ b/lambdas/redis_sync/tests/test_record_processor.py @@ -1,9 +1,9 @@ -from record_processor import process_record import unittest from unittest.mock import patch -from constants import RedisCacheKey from common.s3_event import S3EventRecord +from constants import RedisCacheKey +from record_processor import process_record class TestRecordProcessor(unittest.TestCase): diff --git a/lambdas/redis_sync/tests/test_redis_cacher.py b/lambdas/redis_sync/tests/test_redis_cacher.py index afdb158cb..5adf1a94f 100644 --- a/lambdas/redis_sync/tests/test_redis_cacher.py +++ b/lambdas/redis_sync/tests/test_redis_cacher.py @@ -1,10 +1,10 @@ import unittest from unittest.mock import patch + from redis_cacher import RedisCacher class TestRedisCacher(unittest.TestCase): - def setUp(self): self.mock_s3_reader = patch("redis_cacher.S3Reader").start() self.mock_transform_map = patch("redis_cacher.transform_map").start() diff --git a/lambdas/redis_sync/tests/test_transform_config.py b/lambdas/redis_sync/tests/test_transform_config.py index 4aa3b18c7..408dd572d 100644 --- a/lambdas/redis_sync/tests/test_transform_config.py +++ b/lambdas/redis_sync/tests/test_transform_config.py @@ -1,9 +1,10 @@ -import unittest import json +import unittest from unittest.mock import patch + from transform_configs import ( - transform_vaccine_map, transform_supplier_permissions, + transform_vaccine_map, transform_validation_rules, ) diff --git a/lambdas/redis_sync/tests/test_transform_map.py b/lambdas/redis_sync/tests/test_transform_map.py index e0279f301..a3e2a3973 100644 --- a/lambdas/redis_sync/tests/test_transform_map.py +++ b/lambdas/redis_sync/tests/test_transform_map.py @@ -1,7 +1,8 @@ import unittest from unittest.mock import patch -from transform_map import transform_map + from constants import RedisCacheKey +from transform_map import transform_map class TestTransformMap(unittest.TestCase): 
diff --git a/lambdas/shared/src/common/authentication.py b/lambdas/shared/src/common/authentication.py index d9cb75284..16b900439 100644 --- a/lambdas/shared/src/common/authentication.py +++ b/lambdas/shared/src/common/authentication.py @@ -1,15 +1,16 @@ import base64 import json -import jwt -import requests import time import uuid from enum import Enum -from .cache import Cache +import jwt +import requests from common.clients import logger from common.models.errors import UnhandledResponseError +from .cache import Cache + class Service(Enum): PDS = "pds" diff --git a/lambdas/shared/src/common/aws_lambda_event.py b/lambdas/shared/src/common/aws_lambda_event.py index 69eb0dbe1..b73c898c6 100644 --- a/lambdas/shared/src/common/aws_lambda_event.py +++ b/lambdas/shared/src/common/aws_lambda_event.py @@ -1,5 +1,5 @@ -from typing import Dict, Any from enum import Enum +from typing import Any, Dict class AwsEventType(Enum): @@ -10,7 +10,6 @@ class AwsEventType(Enum): class AwsLambdaEvent: - def __init__(self, event: Dict[str, Any]): self.event_source = None self.event_type = AwsEventType.UNKNOWN diff --git a/lambdas/shared/src/common/clients.py b/lambdas/shared/src/common/clients.py index e6a2bde62..de54dbd3d 100644 --- a/lambdas/shared/src/common/clients.py +++ b/lambdas/shared/src/common/clients.py @@ -1,6 +1,8 @@ -import os import logging -from boto3 import client as boto3_client, resource as boto3_resource +import os + +from boto3 import client as boto3_client +from boto3 import resource as boto3_resource logging.basicConfig(level=logging.INFO) logger = logging.getLogger() diff --git a/lambdas/shared/src/common/log_decorator.py b/lambdas/shared/src/common/log_decorator.py index 725f597e0..63c81610b 100644 --- a/lambdas/shared/src/common/log_decorator.py +++ b/lambdas/shared/src/common/log_decorator.py @@ -8,7 +8,8 @@ import time from datetime import datetime from functools import wraps -from common.clients import logger, firehose_client + +from common.clients import firehose_client, logger def send_log_to_firehose(stream_name, log_data: dict) -> None: diff --git a/lambdas/shared/src/common/pds_service.py b/lambdas/shared/src/common/pds_service.py index 6bdeeaa38..78c6d0a2c 100644 --- a/lambdas/shared/src/common/pds_service.py +++ b/lambdas/shared/src/common/pds_service.py @@ -1,6 +1,6 @@ -import requests import uuid +import requests from common.authentication import AppRestrictedAuth from common.clients import logger from common.models.errors import UnhandledResponseError diff --git a/lambdas/shared/src/common/redis_client.py b/lambdas/shared/src/common/redis_client.py index da530b192..87ae876f0 100644 --- a/lambdas/shared/src/common/redis_client.py +++ b/lambdas/shared/src/common/redis_client.py @@ -1,4 +1,5 @@ import os + import redis from common.clients import logger diff --git a/lambdas/shared/src/common/s3_reader.py b/lambdas/shared/src/common/s3_reader.py index 28cd00e97..6b0505b91 100644 --- a/lambdas/shared/src/common/s3_reader.py +++ b/lambdas/shared/src/common/s3_reader.py @@ -1,4 +1,4 @@ -from common.clients import s3_client, logger +from common.clients import logger, s3_client class S3Reader: diff --git a/lambdas/shared/tests/test_common/test_authentication.py b/lambdas/shared/tests/test_common/test_authentication.py index 728601e3e..44c956d24 100644 --- a/lambdas/shared/tests/test_common/test_authentication.py +++ b/lambdas/shared/tests/test_common/test_authentication.py @@ -1,13 +1,13 @@ import base64 import json -import responses import time import unittest -from responses 
import matchers -from unittest.mock import MagicMock, patch, ANY +from unittest.mock import ANY, MagicMock, patch +import responses from common.authentication import AppRestrictedAuth, Service from common.models.errors import UnhandledResponseError +from responses import matchers class TestAuthenticator(unittest.TestCase): diff --git a/lambdas/shared/tests/test_common/test_aws_dynamodb.py b/lambdas/shared/tests/test_common/test_aws_dynamodb.py index 7575d9156..b4a025bb5 100644 --- a/lambdas/shared/tests/test_common/test_aws_dynamodb.py +++ b/lambdas/shared/tests/test_common/test_aws_dynamodb.py @@ -1,10 +1,10 @@ import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch + from common.aws_dynamodb import get_dynamodb_table class TestGetIedsTable(unittest.TestCase): - AWS_REGION = "eu-west-2" # Add this missing constant def setUp(self): diff --git a/lambdas/shared/tests/test_common/test_aws_lambda_event.py b/lambdas/shared/tests/test_common/test_aws_lambda_event.py index 3c1f73142..3f4ac8787 100644 --- a/lambdas/shared/tests/test_common/test_aws_lambda_event.py +++ b/lambdas/shared/tests/test_common/test_aws_lambda_event.py @@ -1,9 +1,9 @@ import unittest -from common.aws_lambda_event import AwsLambdaEvent, AwsEventType +from common.aws_lambda_event import AwsEventType, AwsLambdaEvent -class TestAwsLambdaEvent(unittest.TestCase): +class TestAwsLambdaEvent(unittest.TestCase): def setUp(self): """Set up test fixtures""" self.sqs_record_dict = { diff --git a/lambdas/shared/tests/test_common/test_clients.py b/lambdas/shared/tests/test_common/test_clients.py index 561e9c9d6..b6944af45 100644 --- a/lambdas/shared/tests/test_common/test_clients.py +++ b/lambdas/shared/tests/test_common/test_clients.py @@ -1,7 +1,8 @@ -import unittest -from unittest.mock import patch, MagicMock -import logging import importlib +import logging +import unittest +from unittest.mock import MagicMock, patch + import common.clients as clients diff --git a/lambdas/shared/tests/test_common/test_errors.py b/lambdas/shared/tests/test_common/test_errors.py index aae4d66e5..2c1bda8e3 100644 --- a/lambdas/shared/tests/test_common/test_errors.py +++ b/lambdas/shared/tests/test_common/test_errors.py @@ -1,10 +1,10 @@ import unittest from unittest.mock import patch + import src.common.models.errors as errors class TestErrors(unittest.TestCase): - def setUp(self): TEST_UUID = "01234567-89ab-cdef-0123-4567890abcde" # Patch uuid4 diff --git a/lambdas/shared/tests/test_common/test_log_decorator.py b/lambdas/shared/tests/test_common/test_log_decorator.py index 5731762bb..cdcc613a9 100644 --- a/lambdas/shared/tests/test_common/test_log_decorator.py +++ b/lambdas/shared/tests/test_common/test_log_decorator.py @@ -1,17 +1,16 @@ -import unittest -from unittest.mock import patch import json +import unittest from datetime import datetime +from unittest.mock import patch from common.log_decorator import ( - logging_decorator, generate_and_send_logs, + logging_decorator, send_log_to_firehose, ) class TestLogDecorator(unittest.TestCase): - def setUp(self): self.test_stream = "test-stream" self.test_prefix = "test" diff --git a/lambdas/shared/tests/test_common/test_pds_service.py b/lambdas/shared/tests/test_common/test_pds_service.py index 0cdb11138..ebe22063c 100644 --- a/lambdas/shared/tests/test_common/test_pds_service.py +++ b/lambdas/shared/tests/test_common/test_pds_service.py @@ -1,11 +1,11 @@ -import responses import unittest -from responses import matchers from unittest.mock import 
create_autospec +import responses from common.authentication import AppRestrictedAuth from common.models.errors import UnhandledResponseError from common.pds_service import PdsService +from responses import matchers class TestPdsService(unittest.TestCase): diff --git a/lambdas/shared/tests/test_common/test_redis_client.py b/lambdas/shared/tests/test_common/test_redis_client.py index d965541d2..0ac13e3f8 100644 --- a/lambdas/shared/tests/test_common/test_redis_client.py +++ b/lambdas/shared/tests/test_common/test_redis_client.py @@ -1,11 +1,11 @@ +import importlib import unittest from unittest.mock import patch + import common.redis_client as redis_client -import importlib class TestRedisClient(unittest.TestCase): - REDIS_HOST = "mock-redis-host" REDIS_PORT = 6379 diff --git a/lambdas/shared/tests/test_common/test_s3_event.py b/lambdas/shared/tests/test_common/test_s3_event.py index 302746513..f11cd7901 100644 --- a/lambdas/shared/tests/test_common/test_s3_event.py +++ b/lambdas/shared/tests/test_common/test_s3_event.py @@ -1,10 +1,10 @@ import unittest + from common.aws_lambda_event import AwsEventType from common.s3_event import S3Event class TestS3Event(unittest.TestCase): - def setUp(self): """Set up test fixtures""" self.s3_record_dict = { diff --git a/lambdas/shared/tests/test_common/test_s3_reader.py b/lambdas/shared/tests/test_common/test_s3_reader.py index 4be69a2c7..f56cae4bc 100644 --- a/lambdas/shared/tests/test_common/test_s3_reader.py +++ b/lambdas/shared/tests/test_common/test_s3_reader.py @@ -1,5 +1,6 @@ import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch + from common.s3_reader import S3Reader diff --git a/package.json b/package.json index 3ad4849d1..29adda7f0 100644 --- a/package.json +++ b/package.json @@ -23,8 +23,8 @@ "lint-staged": { "*": "prettier --ignore-unknown --write", "*.py": [ - "poetry -P quality_checks run flake8", - "poetry -P quality_checks run black -l 121" + "poetry -P quality_checks run ruff check --fix", + "poetry -P quality_checks run ruff format" ], "*.tf": "terraform fmt", "immunisation-fhir-api.{yaml,json}": "redocly lint --skip-rule=security-defined" diff --git a/quality_checks/.flake8 b/quality_checks/.flake8 deleted file mode 100644 index 7c61ead7c..000000000 --- a/quality_checks/.flake8 +++ /dev/null @@ -1,16 +0,0 @@ -[flake8] -max-line-length = 121 - -# TODO - add flake8-bugbear and switch to the following config? -# extend-select = B950 -# extend-ignore = E203,E501,E701 -extend-ignore = E203,E701 - -exclude = - .git, - __pycache__, - dist, - .venv, - node_modules, - .terraform, - tests, # TODO - we really should be linting tests as well but they're full of line too long errors diff --git a/quality_checks/Makefile b/quality_checks/Makefile index 8f144bddd..8b5ee6ad0 100644 --- a/quality_checks/Makefile +++ b/quality_checks/Makefile @@ -1,8 +1,11 @@ +lint-fix: + poetry run ruff check --fix .. + lint: - poetry run flake8 .. + poetry run ruff check .. format: - poetry run black -l 121 .. + poetry run ruff format .. format-check: - poetry run black -l 121 --check .. + poetry run ruff format --check .. diff --git a/quality_checks/poetry.lock b/quality_checks/poetry.lock index 43185f0e3..0d0ccc461 100644 --- a/quality_checks/poetry.lock +++ b/quality_checks/poetry.lock @@ -1,201 +1,35 @@ # This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] -name = "black" -version = "25.9.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7"}, - {file = "black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92"}, - {file = "black-25.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96b6726d690c96c60ba682955199f8c39abc1ae0c3a494a9c62c0184049a713"}, - {file = "black-25.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d119957b37cc641596063cd7db2656c5be3752ac17877017b2ffcdb9dfc4d2b1"}, - {file = "black-25.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:456386fe87bad41b806d53c062e2974615825c7a52159cde7ccaeb0695fa28fa"}, - {file = "black-25.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a16b14a44c1af60a210d8da28e108e13e75a284bf21a9afa6b4571f96ab8bb9d"}, - {file = "black-25.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aaf319612536d502fdd0e88ce52d8f1352b2c0a955cc2798f79eeca9d3af0608"}, - {file = "black-25.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:c0372a93e16b3954208417bfe448e09b0de5cc721d521866cd9e0acac3c04a1f"}, - {file = "black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0"}, - {file = "black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4"}, - {file = "black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e"}, - {file = "black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a"}, - {file = "black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175"}, - {file = "black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f"}, - {file = "black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831"}, - {file = "black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357"}, - {file = "black-25.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef69351df3c84485a8beb6f7b8f9721e2009e20ef80a8d619e2d1788b7816d47"}, - {file = "black-25.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c1f4cd5e93842774d9ee4ef6cd8d17790e65f44f7cdbaab5f2cf8ccf22a823"}, - {file = "black-25.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:154b06d618233fe468236ba1f0e40823d4eb08b26f5e9261526fde34916b9140"}, - {file = "black-25.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e593466de7b998374ea2585a471ba90553283fb9beefcfa430d84a2651ed5933"}, - {file = "black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae"}, - {file = "black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -pytokens = ">=0.1.10" - 
-[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "click" -version = "8.3.0" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.10" -groups = ["main"] -files = [ - {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, - {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] -markers = "platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "flake8" -version = "7.3.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, - {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, +name = "ruff" +version = "0.14.0" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3"}, + {file = "ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8"}, + {file = "ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e"}, + {file = "ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd"}, + {file = "ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d"}, + {file = "ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f"}, + {file = "ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02"}, + {file = "ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296"}, + {file = "ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543"}, + {file = "ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2"}, + {file = "ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730"}, + {file = "ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57"}, ] -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.14.0,<2.15.0" -pyflakes = ">=3.4.0,<3.5.0" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.4.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, - {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pycodestyle" -version = "2.14.0" -description = "Python style guide checker" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, - {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, -] - -[[package]] -name = "pyflakes" -version = "3.4.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, - {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, -] - -[[package]] -name = "pytokens" -version = "0.1.10" -description = "A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pytokens-0.1.10-py3-none-any.whl", hash = "sha256:db7b72284e480e69fb085d9f251f66b3d2df8b7166059261258ff35f50fb711b"}, - {file = "pytokens-0.1.10.tar.gz", hash = "sha256:c9a4bfa0be1d26aebce03e6884ba454e842f186a59ea43a6d3b25af58223c044"}, -] - -[package.extras] -dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"] - [metadata] lock-version = "2.1" python-versions = ">=3.11" -content-hash = "70c45a4fa3975c3574bbf014931571fa196ed9f09d3da9bce3fbe086d6db048f" +content-hash = "0fd5cb0b1adfe11fa1a428906fa7f1aa06a0a24b694d4e35d51086b8ca30b97d" diff --git a/quality_checks/pyproject.toml b/quality_checks/pyproject.toml index ce53a3710..5ad8c23d2 100644 --- a/quality_checks/pyproject.toml +++ b/quality_checks/pyproject.toml @@ -8,11 +8,9 @@ authors = [ readme = "README.md" requires-python = ">=3.11" dependencies = [ - "flake8 (>=7.3.0,<8.0.0)", - "black (>=25.9.0,<26.0.0)", + "ruff (>=0.14.0,<0.15.0)", ] - [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" diff --git a/recordprocessor/src/audit_table.py b/recordprocessor/src/audit_table.py index e2948fdd2..aceca1412 100644 --- a/recordprocessor/src/audit_table.py +++ b/recordprocessor/src/audit_table.py @@ -3,8 +3,8 @@ from typing import Optional from clients import dynamodb_client, logger -from errors import UnhandledAuditTableError from constants import AUDIT_TABLE_NAME, AuditTableKeys +from errors import UnhandledAuditTableError def update_audit_table_status( diff --git a/recordprocessor/src/batch_processor.py b/recordprocessor/src/batch_processor.py index 381ed5496..2b480d232 100644 --- a/recordprocessor/src/batch_processor.py +++ b/recordprocessor/src/batch_processor.py @@ -5,22 +5,22 @@ import time from csv import DictReader from json import JSONDecodeError +from typing import Optional +from audit_table import update_audit_table_status +from clients import logger from constants import ( - FileStatus, - FileNotProcessedReason, - SOURCE_BUCKET_NAME, ARCHIVE_DIR_NAME, PROCESSING_DIR_NAME, + SOURCE_BUCKET_NAME, + FileNotProcessedReason, + FileStatus, ) -from process_row import process_row +from file_level_validation import file_is_empty, file_level_validation, move_file from mappings import map_target_disease -from audit_table import update_audit_table_status +from process_row import process_row from send_to_kinesis import send_to_kinesis -from clients import logger -from file_level_validation import file_level_validation, file_is_empty, move_file from utils_for_recordprocessor import get_csv_content_dict_reader -from typing import Optional def process_csv_to_fhir(incoming_message_body: dict) -> int: @@ -125,9 +125,9 @@ def process_rows( logger.info("MESSAGE ID : %s", row_id) # Log progress every 1000 rows and the first 10 rows after a restart if total_rows_processed_count % 1000 == 0: - logger.info(f"Process: {total_rows_processed_count+1}") + logger.info(f"Process: {total_rows_processed_count + 1}") if start_row > 0 and row_count <= start_row + 10: - logger.info(f"Restarted Process (log up to first 10): {total_rows_processed_count+1}") + logger.info(f"Restarted Process (log up to first 10): {total_rows_processed_count + 1}") # Process the row to obtain the details needed for the message_body and ack file details_from_processing = process_row(target_disease, allowed_operations, row) # Create the message body for sending diff --git a/recordprocessor/src/clients.py 
b/recordprocessor/src/clients.py index 4628d36e0..79f7b494b 100644 --- a/recordprocessor/src/clients.py +++ b/recordprocessor/src/clients.py @@ -4,7 +4,8 @@ import os import redis -from boto3 import client as boto3_client, resource as boto3_resource +from boto3 import client as boto3_client +from boto3 import resource as boto3_resource from botocore.config import Config REGION_NAME = "eu-west-2" diff --git a/recordprocessor/src/convert_to_fhir_imms_resource.py b/recordprocessor/src/convert_to_fhir_imms_resource.py index 08ef17624..0731c3562 100644 --- a/recordprocessor/src/convert_to_fhir_imms_resource.py +++ b/recordprocessor/src/convert_to_fhir_imms_resource.py @@ -1,10 +1,9 @@ """Decorators to add the relevant fields to the FHIR immunization resource from the batch stream""" -from typing import List, Callable, Dict +from typing import Callable, Dict, List -from utils_for_fhir_conversion import _is_not_empty, Generate, Add, Convert from constants import Operation, Urls - +from utils_for_fhir_conversion import Add, Convert, Generate, _is_not_empty ImmunizationDecorator = Callable[[Dict, Dict[str, str]], None] """ @@ -43,7 +42,6 @@ def _decorate_patient(imms: dict, row: Dict[str, str]) -> None: # Add patient if there is at least one non-empty patient value if any(_is_not_empty(value) for value in patient_values): - # Set up patient internal_patient_id = "Patient1" imms["patient"] = {"reference": f"#{internal_patient_id}"} @@ -207,7 +205,6 @@ def _decorate_performer(imms: dict, row: Dict[str, str]) -> None: # Add practitioner if there is at least one practitioner value if any(_is_not_empty(value) for value in practitioner_values): - # Set up the practitioner internal_practitioner_id = "Practitioner1" practitioner = { diff --git a/recordprocessor/src/file_level_validation.py b/recordprocessor/src/file_level_validation.py index 1bcf62359..d379354aa 100644 --- a/recordprocessor/src/file_level_validation.py +++ b/recordprocessor/src/file_level_validation.py @@ -5,22 +5,22 @@ from csv import DictReader -from clients import logger, s3_client -from make_and_upload_ack_file import make_and_upload_ack_file -from utils_for_recordprocessor import get_csv_content_dict_reader -from errors import InvalidHeaders, NoOperationPermissions -from logging_decorator import file_level_validation_logging_decorator from audit_table import update_audit_table_status +from clients import logger, s3_client from constants import ( - SOURCE_BUCKET_NAME, + ARCHIVE_DIR_NAME, EXPECTED_CSV_HEADERS, - permission_to_operation_map, + PROCESSING_DIR_NAME, + SOURCE_BUCKET_NAME, + FileNotProcessedReason, FileStatus, Permission, - FileNotProcessedReason, - ARCHIVE_DIR_NAME, - PROCESSING_DIR_NAME, + permission_to_operation_map, ) +from errors import InvalidHeaders, NoOperationPermissions +from logging_decorator import file_level_validation_logging_decorator +from make_and_upload_ack_file import make_and_upload_ack_file +from utils_for_recordprocessor import get_csv_content_dict_reader def validate_content_headers(csv_content_reader: DictReader) -> None: diff --git a/recordprocessor/src/logging_decorator.py b/recordprocessor/src/logging_decorator.py index 4901f4e52..8947be872 100644 --- a/recordprocessor/src/logging_decorator.py +++ b/recordprocessor/src/logging_decorator.py @@ -5,8 +5,9 @@ import time from datetime import datetime from functools import wraps + from clients import firehose_client, logger -from errors import NoOperationPermissions, InvalidHeaders +from errors import InvalidHeaders, NoOperationPermissions STREAM_NAME = 
os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose") diff --git a/recordprocessor/src/make_and_upload_ack_file.py b/recordprocessor/src/make_and_upload_ack_file.py index d71e137d4..68854697f 100644 --- a/recordprocessor/src/make_and_upload_ack_file.py +++ b/recordprocessor/src/make_and_upload_ack_file.py @@ -1,7 +1,8 @@ """Create ack file and upload to S3 bucket""" from csv import writer -from io import StringIO, BytesIO +from io import BytesIO, StringIO + from clients import s3_client from constants import ACK_BUCKET_NAME diff --git a/recordprocessor/src/mappings.py b/recordprocessor/src/mappings.py index c82e6ef4c..7871a0294 100644 --- a/recordprocessor/src/mappings.py +++ b/recordprocessor/src/mappings.py @@ -1,8 +1,9 @@ """Mappings for converting vaccine type into target disease FHIR element""" import json -from constants import Urls + from clients import redis_client +from constants import Urls def map_target_disease(vaccine: str) -> list: diff --git a/recordprocessor/src/models/utils.py b/recordprocessor/src/models/utils.py index 33b1b4397..ddb70163d 100644 --- a/recordprocessor/src/models/utils.py +++ b/recordprocessor/src/models/utils.py @@ -1,7 +1,7 @@ import uuid from dataclasses import dataclass -from typing import Union from enum import Enum +from typing import Union class Severity(str, Enum): diff --git a/recordprocessor/src/process_row.py b/recordprocessor/src/process_row.py index 88dd65902..d9cb52ec4 100644 --- a/recordprocessor/src/process_row.py +++ b/recordprocessor/src/process_row.py @@ -1,8 +1,8 @@ """Function to process a single row of a csv file""" -from convert_to_fhir_imms_resource import convert_to_fhir_imms_resource -from constants import Diagnostics from clients import logger +from constants import Diagnostics +from convert_to_fhir_imms_resource import convert_to_fhir_imms_resource from utils_for_recordprocessor import create_diagnostics_dictionary diff --git a/recordprocessor/src/send_to_kinesis.py b/recordprocessor/src/send_to_kinesis.py index 16419a501..2184d499e 100644 --- a/recordprocessor/src/send_to_kinesis.py +++ b/recordprocessor/src/send_to_kinesis.py @@ -1,6 +1,7 @@ """Function to send the message to kinesis""" import os + import simplejson as json from botocore.exceptions import ClientError from clients import kinesis_client, logger diff --git a/recordprocessor/src/utils_for_fhir_conversion.py b/recordprocessor/src/utils_for_fhir_conversion.py index e25b4418d..8e91ed63a 100644 --- a/recordprocessor/src/utils_for_fhir_conversion.py +++ b/recordprocessor/src/utils_for_fhir_conversion.py @@ -3,6 +3,7 @@ import re from datetime import datetime from decimal import Decimal, InvalidOperation + from constants import Urls diff --git a/recordprocessor/src/utils_for_recordprocessor.py b/recordprocessor/src/utils_for_recordprocessor.py index 1c874d010..b11dee656 100644 --- a/recordprocessor/src/utils_for_recordprocessor.py +++ b/recordprocessor/src/utils_for_recordprocessor.py @@ -3,6 +3,7 @@ import os from csv import DictReader from io import TextIOWrapper + from clients import s3_client diff --git a/recordprocessor/tests/test_audit_table.py b/recordprocessor/tests/test_audit_table.py index 91fd79c44..85e7d5cf7 100644 --- a/recordprocessor/tests/test_audit_table.py +++ b/recordprocessor/tests/test_audit_table.py @@ -3,35 +3,35 @@ import unittest from unittest import TestCase from unittest.mock import patch + from boto3 import client as boto3_client -from moto import mock_dynamodb from errors import UnhandledAuditTableError 
+from moto import mock_dynamodb -from tests.utils_for_recordprocessor_tests.mock_environment_variables import ( - MOCK_ENVIRONMENT_DICT, -) from tests.utils_for_recordprocessor_tests.generic_setup_and_teardown import ( GenericSetUp, GenericTearDown, ) -from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import ( - MockFileDetails, - FileDetails, +from tests.utils_for_recordprocessor_tests.mock_environment_variables import ( + MOCK_ENVIRONMENT_DICT, ) from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import ( add_entry_to_table, ) +from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import ( + FileDetails, + MockFileDetails, +) # Ensure environment variables are mocked before importing from src files with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT): + from audit_table import update_audit_table_status + from clients import REGION_NAME from constants import ( AUDIT_TABLE_NAME, FileStatus, ) - from audit_table import update_audit_table_status - from clients import REGION_NAME - dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME) diff --git a/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py b/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py index a3f12fbb3..4a8bf0e97 100644 --- a/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py +++ b/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py @@ -5,32 +5,32 @@ NOTE: the public function `decorate` is tested in `TestDecorate` class. """ -from decimal import Decimal import copy import unittest +from decimal import Decimal from unittest.mock import patch from tests.utils_for_recordprocessor_tests.decorator_constants import ( + RSV_TARGET_DISEASE_ELEMENT, AllHeaders, AllHeadersExpectedOutput, ExtensionItems, - RSV_TARGET_DISEASE_ELEMENT, -) -from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import ( - MockFieldDictionaries, ) from tests.utils_for_recordprocessor_tests.mock_environment_variables import ( MOCK_ENVIRONMENT_DICT, ) +from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import ( + MockFieldDictionaries, +) with patch("os.environ", MOCK_ENVIRONMENT_DICT): from constants import Urls from convert_to_fhir_imms_resource import ( + _decorate_immunization, _decorate_patient, + _decorate_performer, _decorate_vaccination, _decorate_vaccine, - _decorate_performer, - _decorate_immunization, ) empty_csv_data = {k: "" for k in MockFieldDictionaries.all_fields} diff --git a/recordprocessor/tests/test_convert_to_fhir_imms_resource.py b/recordprocessor/tests/test_convert_to_fhir_imms_resource.py index 27dd97921..adb1e2c24 100644 --- a/recordprocessor/tests/test_convert_to_fhir_imms_resource.py +++ b/recordprocessor/tests/test_convert_to_fhir_imms_resource.py @@ -1,25 +1,25 @@ """Tests for convert_to_fhir_imms_resource""" import unittest -from typing import Tuple, List +from typing import List, Tuple from unittest.mock import patch +from tests.utils_for_recordprocessor_tests.mock_environment_variables import ( + MOCK_ENVIRONMENT_DICT, +) from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import ( MockFhirImmsResources, MockFieldDictionaries, TargetDiseaseElements, ) -from tests.utils_for_recordprocessor_tests.mock_environment_variables import ( - MOCK_ENVIRONMENT_DICT, -) with patch("os.environ", MOCK_ENVIRONMENT_DICT): from convert_to_fhir_imms_resource import ( + ImmunizationDecorator, _decorate_immunization, _get_decorators_for_action_flag, 
         all_decorators,
         convert_to_fhir_imms_resource,
-        ImmunizationDecorator,
     )
diff --git a/recordprocessor/tests/test_file_level_validation.py b/recordprocessor/tests/test_file_level_validation.py
index b94b6aa07..8a0e807a4 100644
--- a/recordprocessor/tests/test_file_level_validation.py
+++ b/recordprocessor/tests/test_file_level_validation.py
@@ -5,20 +5,19 @@

 # If mock_s3 is not imported here then tests in other files fail when running 'make test'. It is not clear why this is.
 from moto import mock_s3  # noqa: F401
+
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     convert_string_to_dict_reader,
 )
 from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    MOCK_ENVIRONMENT_DICT,
     MockFileDetails,
     ValidMockFileContent,
 )
-from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    MOCK_ENVIRONMENT_DICT,
-)

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from errors import NoOperationPermissions, InvalidHeaders
-    from file_level_validation import validate_content_headers, get_permitted_operations
+    from errors import InvalidHeaders, NoOperationPermissions
+    from file_level_validation import get_permitted_operations, validate_content_headers

 test_file = MockFileDetails.rsv_emis
diff --git a/recordprocessor/tests/test_logging_decorator.py b/recordprocessor/tests/test_logging_decorator.py
index c8359b230..05831e70e 100644
--- a/recordprocessor/tests/test_logging_decorator.py
+++ b/recordprocessor/tests/test_logging_decorator.py
@@ -1,29 +1,30 @@
 """Tests for the logging_decorator and its helper functions"""

+import json
 import unittest
-from unittest.mock import patch
 from contextlib import ExitStack
-from datetime import datetime
-import json
 from copy import deepcopy
+from datetime import datetime
+from unittest.mock import patch
+
 from boto3 import client as boto3_client
-from moto import mock_s3, mock_firehose
+from moto import mock_firehose, mock_s3

-from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    MockFileDetails,
-    ValidMockFileContent,
-)
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
     BucketNames,
     Firehose,
 )
+from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    MockFileDetails,
+    ValidMockFileContent,
+)

 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
     from clients import REGION_NAME
     from errors import InvalidHeaders, NoOperationPermissions
-    from logging_decorator import send_log_to_firehose, generate_and_send_logs
     from file_level_validation import file_level_validation
+    from logging_decorator import generate_and_send_logs, send_log_to_firehose

 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
@@ -203,7 +204,6 @@ def test_splunk_logger_handled_failure(self):
                 expected_error_message,
             ) in test_cases:
                 with self.subTest(expected_error_message):
-
                     s3_client.put_object(
                         Bucket=BucketNames.SOURCE,
                         Key=MOCK_FILE_DETAILS.file_key,
diff --git a/recordprocessor/tests/test_make_and_upload_ack_file.py b/recordprocessor/tests/test_make_and_upload_ack_file.py
index 60bf80139..2ef45d81f 100644
--- a/recordprocessor/tests/test_make_and_upload_ack_file.py
+++ b/recordprocessor/tests/test_make_and_upload_ack_file.py
@@ -1,10 +1,12 @@
 """Tests for make_and_upload_ack_file functions"""

 import unittest
-from unittest.mock import patch
 from copy import deepcopy
+from unittest.mock import patch
+
 from boto3 import client as boto3_client
 from moto import mock_s3
+
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
     BucketNames,
@@ -17,12 +19,12 @@
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
+    from clients import REGION_NAME
     from make_and_upload_ack_file import (
         make_ack_data,
-        upload_ack_file,
         make_and_upload_ack_file,
+        upload_ack_file,
     )
-    from clients import REGION_NAME

 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     GenericSetUp,
diff --git a/recordprocessor/tests/test_map_target_disease.py b/recordprocessor/tests/test_map_target_disease.py
index 5ec0b2aa4..20c353223 100644
--- a/recordprocessor/tests/test_map_target_disease.py
+++ b/recordprocessor/tests/test_map_target_disease.py
@@ -3,6 +3,7 @@
 import json
 import unittest
 from unittest.mock import patch
+
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
 )
diff --git a/recordprocessor/tests/test_process_csv_to_fhir.py b/recordprocessor/tests/test_process_csv_to_fhir.py
index a2af75bc6..7566da32e 100644
--- a/recordprocessor/tests/test_process_csv_to_fhir.py
+++ b/recordprocessor/tests/test_process_csv_to_fhir.py
@@ -2,31 +2,32 @@

 import json
 import unittest
-from unittest.mock import patch
 from copy import deepcopy
+from unittest.mock import patch
+
 import boto3
-from moto import mock_s3, mock_firehose, mock_dynamodb
+from moto import mock_dynamodb, mock_firehose, mock_s3

 from tests.utils_for_recordprocessor_tests.generic_setup_and_teardown import (
     GenericSetUp,
     GenericTearDown,
 )
+from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
+    BucketNames,
+)
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     add_entry_to_table,
 )
 from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    REGION_NAME,
     MockFileDetails,
     ValidMockFileContent,
-    REGION_NAME,
-)
-from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
-    BucketNames,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import FileStatus, AUDIT_TABLE_NAME
     from batch_processor import process_csv_to_fhir
+    from constants import AUDIT_TABLE_NAME, FileStatus

 dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME)
 s3_client = boto3.client("s3", region_name=REGION_NAME)
diff --git a/recordprocessor/tests/test_process_row.py b/recordprocessor/tests/test_process_row.py
index d730a2fe3..0cdd7b9fa 100644
--- a/recordprocessor/tests/test_process_row.py
+++ b/recordprocessor/tests/test_process_row.py
@@ -1,24 +1,23 @@
 """Tests for the process_row module"""

 import unittest
-from unittest.mock import patch
 from copy import deepcopy
-from boto3 import client as boto3_client
-from moto import mock_s3
 from decimal import Decimal
+from unittest.mock import patch

+from boto3 import client as boto3_client
+from moto import mock_s3

-from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    MockFieldDictionaries,
-    TargetDiseaseElements,
+from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
 )
-
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     GenericSetUp,
     GenericTearDown,
 )
-from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
+from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    MockFieldDictionaries,
+    TargetDiseaseElements,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
diff --git a/recordprocessor/tests/test_recordprocessor_edge_cases.py b/recordprocessor/tests/test_recordprocessor_edge_cases.py
index 20140e233..7bde32bb1 100644
--- a/recordprocessor/tests/test_recordprocessor_edge_cases.py
+++ b/recordprocessor/tests/test_recordprocessor_edge_cases.py
@@ -1,15 +1,16 @@
-import unittest
 import os
+import unittest
 from io import BytesIO
 from unittest.mock import call, patch
+
 from batch_processor import process_csv_to_fhir
+
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     create_patch,
 )


 class TestProcessorEdgeCases(unittest.TestCase):
-
     def setUp(self):
         self.mock_logger_info = create_patch("logging.Logger.info")
         self.mock_logger_warning = create_patch("logging.Logger.warning")
diff --git a/recordprocessor/tests/test_recordprocessor_main.py b/recordprocessor/tests/test_recordprocessor_main.py
index 5e810a62e..0d557b75b 100644
--- a/recordprocessor/tests/test_recordprocessor_main.py
+++ b/recordprocessor/tests/test_recordprocessor_main.py
@@ -1,48 +1,46 @@
 "Tests for main function for RecordProcessor"

-import unittest
 import json
+import unittest
+from datetime import datetime, timedelta, timezone
 from decimal import Decimal
 from json import JSONDecodeError
 from unittest.mock import patch
-from datetime import datetime, timedelta, timezone
-from moto import mock_s3, mock_kinesis, mock_firehose, mock_dynamodb
-from boto3 import client as boto3_client

+from boto3 import client as boto3_client
+from moto import mock_dynamodb, mock_firehose, mock_kinesis, mock_s3

+from utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
+    BucketNames,
+    Kinesis,
+)
 from utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     GenericSetUp,
     GenericTearDown,
     add_entry_to_table,
     assert_audit_table_entry,
+    create_patch,
 )
 from utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    MockFileDetails,
+    REGION_NAME,
     FileDetails,
-    ValidMockFileContent,
+    InfAckFileRows,
     MockFhirImmsResources,
     MockFieldDictionaries,
+    MockFileDetails,
     MockLocalIds,
-    InfAckFileRows,
-    REGION_NAME,
-)
-from utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
-    BucketNames,
-    Kinesis,
-)
-from utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
-    create_patch,
+    ValidMockFileContent,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
+    from batch_processor import main
     from constants import (
-        Diagnostics,
-        FileStatus,
-        FileNotProcessedReason,
         AUDIT_TABLE_NAME,
         AuditTableKeys,
+        Diagnostics,
+        FileNotProcessedReason,
+        FileStatus,
     )
-    from batch_processor import main

 s3_client = boto3_client("s3", region_name=REGION_NAME)
 kinesis_client = boto3_client("kinesis", region_name=REGION_NAME)
@@ -149,10 +147,9 @@ def make_kinesis_assertions(self, test_cases):

         for test_name, index, expected_kinesis_data, expect_success in test_cases:
             with self.subTest(test_name):
-
                 kinesis_record = kinesis_records[index]
                 self.assertEqual(kinesis_record["PartitionKey"], mock_rsv_emis_file.queue_name)
-                self.assertEqual(kinesis_record["SequenceNumber"], f"{index+1}")
+                self.assertEqual(kinesis_record["SequenceNumber"], f"{index + 1}")

                 # Ensure that arrival times are sequential
                 approximate_arrival_timestamp = kinesis_record["ApproximateArrivalTimestamp"]
@@ -164,7 +161,7 @@ def make_kinesis_assertions(self, test_cases):

                 kinesis_data = json.loads(kinesis_record["Data"].decode("utf-8"), parse_float=Decimal)
                 expected_kinesis_data = {
-                    "row_id": f"{mock_rsv_emis_file.message_id}^{index+1}",
+                    "row_id": f"{mock_rsv_emis_file.message_id}^{index + 1}",
                     "file_key": mock_rsv_emis_file.file_key,
                     "supplier": mock_rsv_emis_file.supplier,
                     "vax_type": mock_rsv_emis_file.vaccine_type,
@@ -493,7 +490,6 @@ def test_e2e_empty_file_is_flagged_and_processed_correctly(self):
             ),
         ]
         for description, file_content in test_cases:
-
             with self.subTest(description=description):
                 self.mock_batch_processor_logger.reset_mock()
                 test_file = mock_rsv_emis_file
diff --git a/recordprocessor/tests/test_send_to_kinesis.py b/recordprocessor/tests/test_send_to_kinesis.py
index d46e471f8..4a6d3deae 100644
--- a/recordprocessor/tests/test_send_to_kinesis.py
+++ b/recordprocessor/tests/test_send_to_kinesis.py
@@ -1,8 +1,12 @@
 import unittest
 from unittest.mock import patch
-from moto import mock_kinesis
+
 from boto3 import client as boto3_client
+from moto import mock_kinesis

+from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
+)
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     GenericSetUp,
     GenericTearDown,
@@ -10,9 +14,6 @@
 from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
     REGION_NAME,
 )
-from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
-)

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
     from send_to_kinesis import send_to_kinesis
@@ -22,7 +23,6 @@

 @mock_kinesis
 class TestSendToKinesis(unittest.TestCase):
-
     def setUp(self) -> None:
         GenericSetUp(None, None, kinesis_client)

@@ -31,7 +31,6 @@ def tearDown(self) -> None:

     @patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
     def test_send_to_kinesis_success(self):
-
         kinesis_client.return_value = {"ResponseMetadata": {"HTTPStatusCode": 200}}

         # arrange required parameters
diff --git a/recordprocessor/tests/test_utils_for_fhir_conversion.py b/recordprocessor/tests/test_utils_for_fhir_conversion.py
index 37431673d..7b77b2ad5 100644
--- a/recordprocessor/tests/test_utils_for_fhir_conversion.py
+++ b/recordprocessor/tests/test_utils_for_fhir_conversion.py
@@ -1,15 +1,16 @@
 """Unit tests for batch utils"""

 import unittest
-from unittest.mock import patch
 from decimal import Decimal
+from unittest.mock import patch
+
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
     from constants import Urls
-    from utils_for_fhir_conversion import _is_not_empty, Generate, Add, Convert
+    from utils_for_fhir_conversion import Add, Convert, Generate, _is_not_empty


 class TestBatchUtils(unittest.TestCase):
diff --git a/recordprocessor/tests/test_utils_for_recordprocessor.py b/recordprocessor/tests/test_utils_for_recordprocessor.py
index f7d31d8c4..ab3ba099b 100644
--- a/recordprocessor/tests/test_utils_for_recordprocessor.py
+++ b/recordprocessor/tests/test_utils_for_recordprocessor.py
@@ -1,32 +1,34 @@
 """Tests for the utils_for_recordprocessor module"""

+import csv
 import unittest
-from unittest.mock import patch
 from io import StringIO
-import csv
+from unittest.mock import patch
+
 import boto3
 from moto import mock_s3
+
+from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
+    BucketNames,
+)
 from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
     GenericSetUp,
     GenericTearDown,
 )
 from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    REGION_NAME,
     MockFileDetails,
     ValidMockFileContent,
-    REGION_NAME,
-)
-from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
-    BucketNames,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
+    from file_level_validation import move_file
     from utils_for_recordprocessor import (
-        get_environment,
-        get_csv_content_dict_reader,
         create_diagnostics_dictionary,
+        get_csv_content_dict_reader,
+        get_environment,
     )
-    from file_level_validation import move_file

 s3_client = boto3.client("s3", region_name=REGION_NAME)
 test_file = MockFileDetails.rsv_emis
diff --git a/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py b/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
index 0f15b43f5..eb391411c 100644
--- a/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
+++ b/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
@@ -1,13 +1,14 @@
 """Constants for use when testing decorators"""

-from unittest.mock import patch
 from decimal import Decimal
-from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    TargetDiseaseElements,
-)
+from unittest.mock import patch
+
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
 )
+from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
+    TargetDiseaseElements,
+)

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
     from constants import Urls
diff --git a/recordprocessor/tests/utils_for_recordprocessor_tests/generic_setup_and_teardown.py b/recordprocessor/tests/utils_for_recordprocessor_tests/generic_setup_and_teardown.py
index cbd03a663..7f5f2c32a 100644
--- a/recordprocessor/tests/utils_for_recordprocessor_tests/generic_setup_and_teardown.py
+++ b/recordprocessor/tests/utils_for_recordprocessor_tests/generic_setup_and_teardown.py
@@ -3,20 +3,20 @@
 from unittest.mock import patch

 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
-    BucketNames,
     MOCK_ENVIRONMENT_DICT,
-    Sqs,
+    BucketNames,
     Firehose,
+    Sqs,
 )

 # Ensure environment variables are mocked before importing from src files
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
     from clients import REGION_NAME
     from constants import (
-        AuditTableKeys,
-        AUDIT_TABLE_QUEUE_NAME_GSI,
         AUDIT_TABLE_FILENAME_GSI,
         AUDIT_TABLE_NAME,
+        AUDIT_TABLE_QUEUE_NAME_GSI,
+        AuditTableKeys,
     )
@@ -120,7 +120,6 @@ def __init__(
         sqs_client=None,
         dynamodb_client=None,
     ):
-
         if s3_client:
             for bucket_name in [
                 BucketNames.SOURCE,
diff --git a/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py b/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
index 883fc8146..ce7ced903 100644
--- a/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
+++ b/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
@@ -1,32 +1,32 @@
 """Utils for the recordprocessor tests"""

 from io import StringIO
+from typing import Optional
+from unittest.mock import patch
+
+from boto3 import client as boto3_client
+from boto3.dynamodb.types import TypeDeserializer

 from utils_for_recordprocessor_tests.mock_environment_variables import (
+    MOCK_ENVIRONMENT_DICT,
     BucketNames,
     Firehose,
     Kinesis,
 )
 from utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
-    MockFileDetails,
     FileDetails,
+    MockFileDetails,
 )
-from boto3.dynamodb.types import TypeDeserializer
-from boto3 import client as boto3_client
-from unittest.mock import patch
-from utils_for_recordprocessor_tests.mock_environment_variables import (
-    MOCK_ENVIRONMENT_DICT,
-)
-from typing import Optional

 # Ensure environment variables are mocked before importing from src files
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
-    from clients import REGION_NAME
     from csv import DictReader
+
+    from clients import REGION_NAME
     from constants import (
-        AuditTableKeys,
-        AUDIT_TABLE_NAME,
         AUDIT_TABLE_FILENAME_GSI,
+        AUDIT_TABLE_NAME,
         AUDIT_TABLE_QUEUE_NAME_GSI,
+        AuditTableKeys,
     )

 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
@@ -60,7 +60,6 @@ def __init__(
         kinesis_client=None,
         dynamo_db_client=None,
     ):
-
         if s3_client:
             for bucket_name in [
                 BucketNames.SOURCE,
@@ -144,7 +143,6 @@ def __init__(
         kinesis_client=None,
         dynamo_db_client=None,
     ):
-
         if s3_client:
             for bucket_name in [BucketNames.SOURCE, BucketNames.DESTINATION]:
                 for obj in s3_client.list_objects_v2(Bucket=bucket_name).get("Contents", []):
diff --git a/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py b/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
index 365014356..9381babb7 100644
--- a/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
+++ b/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
@@ -1,14 +1,15 @@
 """Values for use in tests"""

-from unittest.mock import patch
 import json
 from decimal import Decimal
+from unittest.mock import patch
+
 from tests.utils_for_recordprocessor_tests.mock_environment_variables import (
     MOCK_ENVIRONMENT_DICT,
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import Urls, AuditTableKeys
+    from constants import AuditTableKeys, Urls

 REGION_NAME = "eu-west-2"
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 000000000..ecb080d4d
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,8 @@
+line-length = 121
+target-version = "py311"
+
+[lint]
+# TODO - consider enabling more rulesets, e.g. B, C90, N, RET, S, UP
+select = ["E", "F", "I", "W"]
+# Recommended exclusions as per the docs, plus E501 line-too-long
+ignore = ["E111", "E114", "E117", "E501", "W191"]
diff --git a/scripts/calculate_version.py b/scripts/calculate_version.py
index 905d7de7c..6fe09935a 100644
--- a/scripts/calculate_version.py
+++ b/scripts/calculate_version.py
@@ -16,12 +16,12 @@
 +startversioning Reset version to v1.0.0-alpha
 """

-import os.path
 import itertools
+import os.path
+
 import git
 import semver

-
 SCRIPT_LOCATION = os.path.join(os.path.dirname(os.path.abspath(__file__)))
 REPO_ROOT = os.path.abspath(os.path.join(SCRIPT_LOCATION, ".."))
 REPO = git.Repo(REPO_ROOT)
diff --git a/scripts/destroy_unused_workspaces.py b/scripts/destroy_unused_workspaces.py
index 7a58858d5..b1dc82750 100644
--- a/scripts/destroy_unused_workspaces.py
+++ b/scripts/destroy_unused_workspaces.py
@@ -1,5 +1,5 @@
-import subprocess
 import os
+import subprocess


 def execute_terraform_command(command, cwd=None):
@@ -59,7 +59,7 @@ def list_pr_workspaces(prefix):


 def destroy_workspace(workspace_name, project_name, project_short_name):
     command_select = f"terraform workspace select {workspace_name}"
-    tf_vars = f"-var=project_name={project_name} " f"-var=project_short_name={project_short_name} "
+    tf_vars = f"-var=project_name={project_name} -var=project_short_name={project_short_name} "
     command_destroy = f"terraform destroy {tf_vars} -auto-approve"
     command_delete = f"terraform workspace select default && terraform workspace delete {workspace_name}"
diff --git a/scripts/set_version.py b/scripts/set_version.py
index bec5c846b..0009394ee 100644
--- a/scripts/set_version.py
+++ b/scripts/set_version.py
@@ -5,8 +5,10 @@
 Reads an openapi spec on stdin and adds the calculated version to it, then prints it on stdout.
 """
-import sys
+
 import json
+import sys
+
 from calculate_version import calculate_version
diff --git a/scripts/yaml2json.py b/scripts/yaml2json.py
index 1842fda69..35793175a 100644
--- a/scripts/yaml2json.py
+++ b/scripts/yaml2json.py
@@ -4,9 +4,11 @@
 Takes yaml on stdin and writes json on stdout, converting dates correctly.
 """
-import sys
-import json
+
 import datetime
+import json
+import sys
+
 import yaml
diff --git a/temporary_sandbox/fhir_api/__init__.py b/temporary_sandbox/fhir_api/__init__.py
index 875867b54..1de14b85e 100644
--- a/temporary_sandbox/fhir_api/__init__.py
+++ b/temporary_sandbox/fhir_api/__init__.py
@@ -3,16 +3,16 @@
 """

 import os
+
 from fastapi import FastAPI
+from fhir_api.models.fhir_r4.common import Identifier, Reference
 from fhir_api.routes import (
-    root,
     dynamodb,
+    root,
     status_endpoints,
 )
-from fhir_api.models.fhir_r4.common import Reference, Identifier
-

 Reference.update_forward_refs(identifier=Identifier)
@@ -20,7 +20,7 @@
     title=os.getenv("FASTAPI_TITLE", "Immunisation Fhir API"),
     description=os.getenv("FASTAPI_DESC", "API"),
     version=os.getenv("VERSION", "DEVELOPMENT"),
-    root_path=f'/{os.getenv("SERVICE_BASE_PATH")}/',
+    root_path=f"/{os.getenv('SERVICE_BASE_PATH')}/",
     docs_url="/documentation",
     redoc_url="/redocumentation",
 )
diff --git a/temporary_sandbox/fhir_api/exceptions/base_exceptions.py b/temporary_sandbox/fhir_api/exceptions/base_exceptions.py
index b2cf0a4f0..a3620cf37 100644
--- a/temporary_sandbox/fhir_api/exceptions/base_exceptions.py
+++ b/temporary_sandbox/fhir_api/exceptions/base_exceptions.py
@@ -3,14 +3,16 @@
 # pylint: disable=W0231

 from typing import Type
+
 import fastapi
 from fastapi.responses import JSONResponse
+
 from fhir_api.models.errors import (
-    NotFoundError,
     AlreadyExistsError,
-    WebSocketError,
     BaseError,
     BaseIdentifiedError,
+    NotFoundError,
+    WebSocketError,
 )
diff --git a/temporary_sandbox/fhir_api/models/dynamodb/data_input.py b/temporary_sandbox/fhir_api/models/dynamodb/data_input.py
index a926ab875..be0188752 100644
--- a/temporary_sandbox/fhir_api/models/dynamodb/data_input.py
+++ b/temporary_sandbox/fhir_api/models/dynamodb/data_input.py
@@ -1,12 +1,12 @@
 """Data input model for DynamoDB"""

 from typing import Optional
+
+from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
 from pydantic import (
     BaseModel,
 )

-from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
-

 class DataInput(BaseModel):
     """Data input model"""
diff --git a/temporary_sandbox/fhir_api/models/dynamodb/read_models.py b/temporary_sandbox/fhir_api/models/dynamodb/read_models.py
index b34c3557d..1440831ab 100644
--- a/temporary_sandbox/fhir_api/models/dynamodb/read_models.py
+++ b/temporary_sandbox/fhir_api/models/dynamodb/read_models.py
@@ -1,18 +1,18 @@
 """Read Models for Dynamodb"""

 from typing import (
-    Union,
     Literal,
     Optional,
-)
-from pydantic import (
-    BaseModel,
-    Field,
+    Union,
 )

 from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
 from fhir_api.models.fhir_r4.immunization import Immunization
 from fhir_api.models.fhir_r4.patient import Patient
+from pydantic import (
+    BaseModel,
+    Field,
+)


 class Resource(BaseModel):
diff --git a/temporary_sandbox/fhir_api/models/dynamodb/update_model.py b/temporary_sandbox/fhir_api/models/dynamodb/update_model.py
index 33cb38266..c89d6ba01 100644
--- a/temporary_sandbox/fhir_api/models/dynamodb/update_model.py
+++ b/temporary_sandbox/fhir_api/models/dynamodb/update_model.py
@@ -1,19 +1,17 @@
 """Update Model for Immunization Records"""

 import datetime
-
-from typing import Optional, Literal, Any
-from pydantic import BaseModel, Field
+from typing import Any, Literal, Optional

 import fhir_api.models.fhir_r4.code_types as code_types
-from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
 from fhir_api.models.fhir_r4.common import (
     CodeableConceptType,
-    Reference,
     Quantity,
+    Reference,
 )
-
-from fhir_api.models.fhir_r4.immunization import Performer, ProtocolApplied, Annotation
+from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
+from fhir_api.models.fhir_r4.immunization import Annotation, Performer, ProtocolApplied
+from pydantic import BaseModel, Field


 class UpdateImmunizationRecord(BaseModel):
diff --git a/temporary_sandbox/fhir_api/models/fhir_r4/common.py b/temporary_sandbox/fhir_api/models/fhir_r4/common.py
index d303a4589..ba648ad78 100644
--- a/temporary_sandbox/fhir_api/models/fhir_r4/common.py
+++ b/temporary_sandbox/fhir_api/models/fhir_r4/common.py
@@ -1,15 +1,13 @@
 """Common FHIR Data Models"""

+from datetime import datetime
 from typing import (
     Optional,
 )

-from pydantic import BaseModel, validator, PositiveInt
-
-from datetime import datetime
-
 import fhir_api.models.fhir_r4.code_types as code_types
 from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
+from pydantic import BaseModel, PositiveInt, validator


 class CodingType(BaseModel):
diff --git a/temporary_sandbox/fhir_api/models/fhir_r4/fhir_datatype_fields.py b/temporary_sandbox/fhir_api/models/fhir_r4/fhir_datatype_fields.py
index e3a276ae2..cd4906321 100644
--- a/temporary_sandbox/fhir_api/models/fhir_r4/fhir_datatype_fields.py
+++ b/temporary_sandbox/fhir_api/models/fhir_r4/fhir_datatype_fields.py
@@ -1,14 +1,14 @@
 """Generic Fields for FHIR Revision 4"""

 import datetime
-
-from typing import Union
 from functools import partial
+from typing import Union
+
 from pydantic import (
-    Field,
     AnyUrl,
-    conint,
+    Field,
     PositiveInt,
+    conint,
 )
diff --git a/temporary_sandbox/fhir_api/models/fhir_r4/immunization.py b/temporary_sandbox/fhir_api/models/fhir_r4/immunization.py
index da4b53fb4..7e8a634fa 100644
--- a/temporary_sandbox/fhir_api/models/fhir_r4/immunization.py
+++ b/temporary_sandbox/fhir_api/models/fhir_r4/immunization.py
@@ -1,17 +1,17 @@
 """Immunization Data Model based on Fhir Revision 4 spec"""

-from typing import Optional, Literal, Any
-from pydantic import BaseModel, PositiveInt
 import datetime
+from typing import Any, Literal, Optional

 import fhir_api.models.fhir_r4.code_types as code_types
-from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
 from fhir_api.models.fhir_r4.common import (
-    Identifier,
     CodeableConceptType,
-    Reference,
+    Identifier,
     Quantity,
+    Reference,
 )
+from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
+from pydantic import BaseModel, PositiveInt


 class Performer(BaseModel):
diff --git a/temporary_sandbox/fhir_api/models/fhir_r4/patient.py b/temporary_sandbox/fhir_api/models/fhir_r4/patient.py
index d77d09d51..1e4019976 100644
--- a/temporary_sandbox/fhir_api/models/fhir_r4/patient.py
+++ b/temporary_sandbox/fhir_api/models/fhir_r4/patient.py
@@ -1,22 +1,22 @@
 """Patient Data Model based on Fhir Revision 4 spec"""

 from datetime import datetime
-from typing import Optional, Literal
-from pydantic import (
-    BaseModel,
-)
+from typing import Literal, Optional

 import fhir_api.models.fhir_r4.code_types as code_types
-from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
 from fhir_api.models.fhir_r4.common import (
-    Identifier,
-    HumanName,
-    ContactPoint,
     Address,
-    CodeableConceptType,
     Attachment,
-    Reference,
+    CodeableConceptType,
+    ContactPoint,
+    HumanName,
+    Identifier,
     Period,
+    Reference,
+)
+from fhir_api.models.fhir_r4.fhir_datatype_fields import FhirR4Fields
+from pydantic import (
+    BaseModel,
 )
diff --git a/temporary_sandbox/fhir_api/routes/dynamodb.py b/temporary_sandbox/fhir_api/routes/dynamodb.py
index 1548fe0eb..efcd975d9 100644
--- a/temporary_sandbox/fhir_api/routes/dynamodb.py
+++ b/temporary_sandbox/fhir_api/routes/dynamodb.py
@@ -1,9 +1,9 @@
 """DynamoDB Router Methods"""

 import json
+from typing import Optional

 from fastapi import APIRouter
-from typing import Optional
 from fhir_api.models.dynamodb.read_models import BatchImmunizationRead

@@ -24,7 +24,6 @@ def read_immunization_record(
     to_date: Optional[str] = "9999-01-01",
     include_record: Optional[str] = None,
 ) -> BatchImmunizationRead:
-
     with open("/sandbox/fhir_api/sandbox_data.json", "r") as input:
         data = json.load(input)
diff --git a/temporary_sandbox/fhir_api/routes/root.py b/temporary_sandbox/fhir_api/routes/root.py
index f900ec312..e30cc7eae 100644
--- a/temporary_sandbox/fhir_api/routes/root.py
+++ b/temporary_sandbox/fhir_api/routes/root.py
@@ -3,7 +3,6 @@
 from fastapi import APIRouter
 from fastapi.responses import PlainTextResponse
-

 router = APIRouter()