diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e3c5f8e5e..e1a2446e2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: rev: v1.4.0 hooks: - id: detect-secrets - exclude: ".pre-commit-config.yaml|infrastructure/localstack/provider.tf|src/etl/sds/tests/changelog|src/etl/sds/worker/bulk/transform_bulk/tests|src/etl/sds/worker/bulk/tests/stage_data|src/api/tests/smoke_tests/test_smoke.py|archived_epr/src_old/api/tests/smoke_tests/test_smoke.py" + exclude: ".pre-commit-config.yaml|infrastructure/localstack/provider.tf|archived_epr/src_old/etl/sds/tests/changelog|archived_epr/src_old/etl/sds/worker/bulk/transform_bulk/tests|archived_epr/src_old/etl/sds/worker/bulk/tests/stage_data|src/api/tests/smoke_tests/test_smoke.py|archived_epr/src_old/api/tests/smoke_tests/test_smoke.py" - repo: https://github.com/prettier/pre-commit rev: 57f39166b5a5a504d6808b87ab98d41ebf095b46 diff --git a/CHANGELOG.md b/CHANGELOG.md index 7fc137d18..035e736bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 2025-02-26 +- [PI-793] Remove EPR ETL +- [PI-795] Remove EPR repository layers +- [PI-834] Remove EPR domain logic +- Dependabot: Update black +- Dependabot: Update attrs + ## 2025-02-24 - [PI-794] Remove EPR S3 tests - [PI-788] Create Product Search and Delete Flows for test UI diff --git a/VERSION b/VERSION index 6d1ac6d91..d66c9ef6c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2025.02.24 +2025.02.26 diff --git a/archived_epr/infrastructure/terraform/per_workspace/.terraform-version b/archived_epr/infrastructure/terraform/per_workspace/.terraform-version new file mode 100644 index 000000000..f01291b87 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/.terraform-version @@ -0,0 +1 @@ +1.5.7 diff --git a/archived_epr/infrastructure/terraform/per_workspace/locals.tf b/archived_epr/infrastructure/terraform/per_workspace/locals.tf new file mode 100644 index 000000000..25a75d4d4 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/locals.tf @@ -0,0 +1,15 @@ +locals { + region = "eu-west-2" + project = "nhse-cpm" + current_time = timestamp() + workspace_type = var.workspace_type + permission_resource_map = { + kms = ["*"] + dynamodb = ["${module.eprtable.dynamodb_table_arn}", "${module.eprtable.dynamodb_table_arn}/*", "${module.cpmtable.dynamodb_table_arn}", "${module.cpmtable.dynamodb_table_arn}/*"] + } + # e.g. api.cpm.dev.national.nhs.uk + zone = var.domain + + domain = "${terraform.workspace}.${var.domain}" + etl_snapshot_bucket = contains(["int", "prod"], var.environment) ? "${local.project}--${replace(var.environment, "_", "-")}--snapshot" : "snapshot_not_required" +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/main.tf b/archived_epr/infrastructure/terraform/per_workspace/main.tf new file mode 100644 index 000000000..20d2e622f --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/main.tf @@ -0,0 +1,225 @@ +resource "aws_resourcegroups_group" "resource_group" { + name = "${local.project}--${replace(terraform.workspace, "_", "-")}--resource-group" + description = "${local.workspace_type} workspace resource group." 
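# Editor's note: a worked sketch of how the interpolations above resolve, using a
# hypothetical workspace name "abc_123" (project is "nhse-cpm" per locals.tf):
#   name        = "nhse-cpm--abc-123--resource-group"
#   description = "PERSISTENT workspace resource group."
# The replace(terraform.workspace, "_", "-") is needed because underscores are
# legal in Terraform workspace names but not in many AWS resource names.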
+ tags = { + Name = "${local.project}--${replace(terraform.workspace, "_", "-")}--resource-group" + CreatedOn = var.updated_date + LastUpdated = var.updated_date + ExpirationDate = var.expiration_date + } + + lifecycle { + ignore_changes = [tags["CreatedOn"]] + } + + resource_query { + query = < 0 + policy_statements = { + for file in fileset("${path.module}/../../../src/api/${each.key}/policies", "*.json") : replace(file, ".json", "") => { + effect = "Allow" + actions = jsondecode(file("${path.module}/../../../src/api/${each.key}/policies/${file}")) + resources = local.permission_resource_map[replace(file, ".json", "")] + } + } + memory_size = var.lambda_memory_size +} + +module "authoriser" { + name = "authoriser" + source = "./modules/api_worker/api_lambda" + python_version = var.python_version + lambda_name = "${local.project}--${replace(terraform.workspace, "_", "-")}--authoriser" + source_path = "${path.module}/../../../src/api/authoriser/dist/authoriser.zip" + environment_variables = { + ENVIRONMENT = var.environment + } + layers = concat( + compact([for instance in module.layers : contains(var.api_lambda_layers, instance.name) ? instance.layer_arn : null]), + [element([for instance in module.third_party_layers : instance if instance.name == "third_party_core"], 0).layer_arn] + ) + trusted_entities = [ + { + type = "Service", + identifiers = [ + "apigateway.amazonaws.com" + ] + } + ] + + attach_policy_json = true + policy_json = <<-EOT + { + "Version": "2012-10-17", + "Statement": [ + { + "Action": "lambda:InvokeFunction", + "Effect": "Allow", + "Resource": "arn:aws:lambda:eu-west-2:${var.assume_account}:function:${local.project}--${replace(terraform.workspace, "_", "-")}--authoriser" + }, + { + "Action": "secretsmanager:GetSecretValue", + "Effect": "Allow", + "Resource": "${data.aws_secretsmanager_secret.cpm_apigee_api_key.arn}" + } + ] + } + EOT +} + +module "domain" { + source = "./modules/domain" + domain = local.domain + zone = local.zone +} + +module "api_entrypoint" { + source = "./modules/api_entrypoint" + assume_account = var.assume_account + project = local.project + name = "${local.project}--${replace(terraform.workspace, "_", "-")}--api-entrypoint" + lambdas = setsubtract(var.lambdas, ["authoriser"]) + authoriser_metadata = module.authoriser.metadata + domain = module.domain.domain_cert + depends_on = [module.domain] +} + +data "aws_s3_bucket" "truststore_bucket" { + bucket = "${local.project}--${replace(var.environment, "_", "-")}--truststore" +} + + +module "sds_etl" { + source = "./modules/etl/sds" + workspace_prefix = "${local.project}--${replace(terraform.workspace, "_", "-")}" + assume_account = var.assume_account + python_version = var.python_version + event_layer_arn = element([for instance in module.layers : instance if instance.name == "event"], 0).layer_arn + third_party_core_layer_arn = element([for instance in module.third_party_layers : instance if instance.name == "third_party_sds"], 0).layer_arn + third_party_sds_update_layer_arn = element([for instance in module.third_party_layers : instance if instance.name == "third_party_sds_update"], 0).layer_arn + domain_layer_arn = element([for instance in module.layers : instance if instance.name == "domain"], 0).layer_arn + sds_layer_arn = element([for instance in module.layers : instance if instance.name == "sds"], 0).layer_arn + table_name = module.eprtable.dynamodb_table_name + table_arn = module.eprtable.dynamodb_table_arn + is_persistent = var.workspace_type == "PERSISTENT" + truststore_bucket = 
data.aws_s3_bucket.truststore_bucket + etl_snapshot_bucket = local.etl_snapshot_bucket + environment = var.environment +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/api_gateway.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/api_gateway.tf new file mode 100644 index 000000000..7a98c064a --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/api_gateway.tf @@ -0,0 +1,122 @@ +resource "aws_api_gateway_rest_api" "api_gateway_rest_api" { + name = var.name + description = "API Gateway Rest API - autogenerated from swagger" + # UNCOMMENT THIS WHEN ENABLING CUSTOM DOMAINS + # disable_execute_api_endpoint = true + body = sensitive(local.swagger_file) + + depends_on = [ + aws_cloudwatch_log_group.api_gateway_access_logs + ] + +} + +resource "aws_api_gateway_deployment" "api_gateway_deployment" { + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.api_gateway_rest_api.body)) + resource_change = "${md5(file("${path.module}/api_gateway.tf"))}" + } + + lifecycle { + create_before_destroy = true + } + + depends_on = [ + aws_api_gateway_rest_api.api_gateway_rest_api, + aws_cloudwatch_log_group.api_gateway_access_logs, + aws_cloudwatch_log_group.api_gateway_execution_logs + ] +} + +resource "aws_api_gateway_stage" "api_gateway_stage" { + deployment_id = aws_api_gateway_deployment.api_gateway_deployment.id + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + stage_name = "production" + xray_tracing_enabled = true + + access_log_settings { + destination_arn = aws_cloudwatch_log_group.api_gateway_access_logs.arn + format = jsonencode({ + requestid : "$context.requestId", + ip : "$context.identity.sourceIp", + user_agent : "$context.identity.userAgent", + request_time : "$context.requestTime", + http_method : "$context.httpMethod", + path : "$context.path", + status : "$context.status", + protocol : "$context.protocol", + response_length : "$context.responseLength", + x_correlationid : "$context.authorizer.x-correlation-id", + nhsd_correlationid : "$context.authorizer.nhsd-correlation-id" + environment : terraform.workspace + }) + } + + depends_on = [ + aws_api_gateway_deployment.api_gateway_deployment, + aws_api_gateway_rest_api.api_gateway_rest_api, + aws_cloudwatch_log_group.api_gateway_access_logs, + aws_cloudwatch_log_group.api_gateway_execution_logs + ] +} + +resource "aws_api_gateway_method_settings" "api_gateway_method_settings" { + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + stage_name = aws_api_gateway_stage.api_gateway_stage.stage_name + method_path = "*/*" + settings { + logging_level = "INFO" + data_trace_enabled = true + } + + depends_on = [ + aws_api_gateway_rest_api.api_gateway_rest_api, + aws_api_gateway_stage.api_gateway_stage + ] +} + +resource "aws_api_gateway_gateway_response" "api_access_denied" { + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + response_type = "ACCESS_DENIED" + response_templates = { + "application/json" = jsonencode({ + errors : [{ + code : "PROCESSING" + message : "$context.authorizer.error" + }] + }) + } + response_parameters = { + "gatewayresponse.header.Access-Control-Allow-Origin" = "'*'" + } +} + +resource "aws_api_gateway_gateway_response" "api_default_4xx" { + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + response_type = "DEFAULT_4XX" + 
response_templates = { + "application/json" = jsonencode({ + errors : [{ + code : "PROCESSING" + message : "$context.error.message" + }] + }) } + response_parameters = { "gatewayresponse.header.Access-Control-Allow-Origin" = "'*'" + } +} + +resource "aws_api_gateway_gateway_response" "api_default_5xx" { + rest_api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + response_type = "DEFAULT_5XX" + response_templates = { + "application/json" = jsonencode({ + errors : [{ + code : "PROCESSING" + message : "exception" + }] + }) } + response_parameters = { "gatewayresponse.header.Access-Control-Allow-Origin" = "'*'" + } +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/cloudwatch.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/cloudwatch.tf new file mode 100644 index 000000000..ea1ad78ac --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/cloudwatch.tf @@ -0,0 +1,22 @@ +resource "aws_cloudwatch_log_group" "api_gateway_access_logs" { + name = "/aws/api-gateway/access-logs/${var.name}" + + kms_key_id = module.kms.key_arn + + depends_on = [ + module.kms + ] + +} + +resource "aws_cloudwatch_log_group" "api_gateway_execution_logs" { + name = "API-Gateway-Execution-Logs_${aws_api_gateway_rest_api.api_gateway_rest_api.id}/production" + + kms_key_id = module.kms.key_arn + + depends_on = [ + aws_api_gateway_rest_api.api_gateway_rest_api, + module.kms + ] + +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/debug.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/debug.tf new file mode 100644 index 000000000..e2129e4c7 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/debug.tf @@ -0,0 +1,4 @@ +resource "local_file" "rendered_swagger" { + content = sensitive(local.swagger_file) + filename = "${path.root}/../../swagger/dist/aws/rendered/swagger.yaml" +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/domain.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/domain.tf new file mode 100644 index 000000000..83836d45f --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/domain.tf @@ -0,0 +1,8 @@ +resource "aws_api_gateway_base_path_mapping" "mapping" { + api_id = aws_api_gateway_rest_api.api_gateway_rest_api.id + stage_name = aws_api_gateway_stage.api_gateway_stage.stage_name + domain_name = var.domain + depends_on = [ + aws_api_gateway_stage.api_gateway_stage + ] +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/kms.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/kms.tf new file mode 100644 index 000000000..ddfa415f7 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/kms.tf @@ -0,0 +1,46 @@ +module "kms" { + source = "terraform-aws-modules/kms/aws" + version = "2.0.1" + + deletion_window_in_days = local.kms.deletion_window_in_days + + description = "${title(var.name)}--cloudwatch KMS key" + + key_statements = { + statement = { + principals = { + principal = { + type = "Service" + + identifiers = [ + "logs.eu-west-2.amazonaws.com" + ] + } + } + actions = [ + "kms:Encrypt*", + "kms:Decrypt*", + "kms:ReEncrypt*", + 
"kms:GenerateDataKey*", + "kms:Describe*" + ] + resources = ["*"] + conditions = { + condition = { + test = "ArnLike" + variable = "kms:EncryptionContext:aws:logs:arn" + values = [ + "arn:aws:logs:eu-west-2:${var.assume_account}:log-group:*" + ] + } + } + } + } + + # Aliases + aliases = ["alias/${var.name}--cloudwatch"] + + tags = { + Name = replace("${var.name}--cloudwatch", "_", "-") + } +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf new file mode 100644 index 000000000..5d50c10fb --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf @@ -0,0 +1,17 @@ +locals { + apigateway_lambda_arn_prefix = "arn:aws:apigateway:eu-west-2:lambda:path/2015-03-31/functions/arn:aws:lambda:eu-west-2" + kms = { + deletion_window_in_days = 7 + } + methods = [ + for lambda_alias in setsubtract(var.lambdas, ["authoriser"]) : + { "method_${lambda_alias}" = "${local.apigateway_lambda_arn_prefix}:${var.assume_account}:function:${var.project}--${replace(terraform.workspace, "_", "-")}--${replace(replace(replace(replace(lambda_alias, "_", "-"), "DeviceReferenceData", "DeviceRefData"), "MessageHandlingSystem", "MHS"), "MessageSet", "MsgSet")}/invocations" } + ] + swagger_file = templatefile("${path.root}/../../swagger/dist/aws/swagger.yaml", merge({ + lambda_invoke_arn = var.authoriser_metadata.lambda_invoke_arn, + authoriser_iam_role = var.authoriser_metadata.authoriser_iam_role, + authoriser_name = var.authoriser_metadata.authoriser_name, + }, + local.methods... + )) +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/output.tf new file mode 100644 index 000000000..a38e4b9c1 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/output.tf @@ -0,0 +1,19 @@ +output "api_gateway_id" { + value = aws_api_gateway_rest_api.api_gateway_rest_api.id +} + +output "execution_arn" { + value = aws_api_gateway_rest_api.api_gateway_rest_api.execution_arn +} + +output "kms_key" { + value = module.kms.key_id +} + +output "invoke_url" { + value = aws_api_gateway_stage.api_gateway_stage.invoke_url +} + +output "api_base_url" { + value = "https://${var.domain}" +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/vars.tf new file mode 100644 index 000000000..fa3be1ea8 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/vars.tf @@ -0,0 +1,21 @@ +variable "name" { + type = string +} + +variable "lambdas" { + +} + +variable "assume_account" { +} + +variable "project" { + +} + +variable "authoriser_metadata" { +} + +variable "domain" { + type = string +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/main.tf new file mode 100644 index 000000000..e2f4df570 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/main.tf @@ -0,0 +1,9 @@ +module "api_gateway" { + source = "./api_gateway" + name = var.name + lambdas = var.lambdas + assume_account = 
var.assume_account + project = var.project + authoriser_metadata = var.authoriser_metadata + domain = var.domain +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/output.tf new file mode 100644 index 000000000..cb57411bf --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/output.tf @@ -0,0 +1,7 @@ +output "execution_arn" { + value = module.api_gateway.execution_arn +} + +output "invoke_url" { + value = module.api_gateway.invoke_url +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/vars.tf new file mode 100644 index 000000000..35af61ca6 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_entrypoint/vars.tf @@ -0,0 +1,20 @@ +variable "name" { + type = string +} + +variable "lambdas" { + +} + +variable "assume_account" { +} + +variable "project" { + +} + +variable "authoriser_metadata" { +} + +variable "domain" { +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/dynamodb.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/dynamodb.tf new file mode 100644 index 000000000..71529cb6e --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/dynamodb.tf @@ -0,0 +1,22 @@ +module "dynamodb_table" { + source = "terraform-aws-modules/dynamodb-table/aws" + version = "3.3.0" + + name = var.name + billing_mode = "PAY_PER_REQUEST" + hash_key = var.hash_key + range_key = var.range_key + deletion_protection_enabled = var.deletion_protection_enabled + attributes = var.attributes + global_secondary_indexes = var.global_secondary_indexes + + server_side_encryption_enabled = true + server_side_encryption_kms_key_arn = module.kms.key_arn + + point_in_time_recovery_enabled = true + + tags = { + Name = var.name + } + +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/iam.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/iam.tf new file mode 100644 index 000000000..e67ce3a9b --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/iam.tf @@ -0,0 +1,79 @@ +module "iam_policy_read" { + source = "terraform-aws-modules/iam/aws//modules/iam-policy" + version = "5.30.0" + + name = "${var.name}--iam-policy-read" + path = "/" + description = "Read the ${var.name} table" + + tags = { + Name = "${var.name}--iam-policy-read" + } + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "kms:Decrypt", + "kms:DescribeKey" + ] + Effect = "Allow" + Resource = [ + module.kms.key_arn + ] + }, + { + Effect = "Allow" + Action = [ + "dynamodb:Query", + "dynamodb:Scan", + "dynamodb:GetItem", + ], + Resource = [ + "${module.dynamodb_table.dynamodb_table_arn}*" + ] + } + ] + }) +} + +module "iam_policy_write" { + source = "terraform-aws-modules/iam/aws//modules/iam-policy" + version = "5.30.0" + + name = "${var.name}--iam-policy-write" + path = "/" + description = "Write to the ${var.name} table" + + tags = { + Name = "${var.name}--iam-policy-write" + } + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "kms:Encrypt", + "kms:GenerateDataKey" + ] + Effect = "Allow" + Resource = [ + module.kms.key_arn + ] + }, + { + Effect = "Allow" + Action = [ + 
"dynamodb:PutItem", + "dynamodb:UpdateItem", + "dynamodb:DeleteItem", + ], + Resource = [ + "${module.dynamodb_table.dynamodb_table_arn}*" + ] + } + ] + }) +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/kms.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/kms.tf new file mode 100644 index 000000000..711cc56d1 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/kms.tf @@ -0,0 +1,14 @@ +module "kms" { + source = "terraform-aws-modules/kms/aws" + version = "2.0.1" + + deletion_window_in_days = var.kms_deletion_window_in_days + + # Aliases + aliases = [var.name] + + tags = { + Name = "${var.name}--kms" + } + +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/outputs.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/outputs.tf new file mode 100644 index 000000000..acaab22fc --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/outputs.tf @@ -0,0 +1,7 @@ +output "dynamodb_table_name" { + value = module.dynamodb_table.dynamodb_table_id +} + +output "dynamodb_table_arn" { + value = module.dynamodb_table.dynamodb_table_arn +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/vars.tf new file mode 100644 index 000000000..05f490020 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_storage/vars.tf @@ -0,0 +1,39 @@ +variable "name" {} + +variable "environment" {} + +variable "range_key" {} + +variable "hash_key" {} + +variable "deletion_protection_enabled" { + type = bool + default = false +} + +variable "kms_deletion_window_in_days" { + type = number + default = 7 +} + +variable "attributes" { + type = list(object( + { + name = string + type = string + } + )) + default = [] +} + +variable "global_secondary_indexes" { + type = list(object( + { + name = string + hash_key = string + range_key = string + projection_type = string + } + )) + default = [] +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/lambda.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/lambda.tf new file mode 100644 index 000000000..dce91f36e --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/lambda.tf @@ -0,0 +1,37 @@ +module "lambda_function" { + source = "terraform-aws-modules/lambda/aws" + version = "6.0.0" + + function_name = var.lambda_name + description = "${replace(var.name, "_", "-")} lambda function" + handler = "api.${var.name}.index.handler" + runtime = var.python_version + timeout = 10 + memory_size = var.memory_size + + timeouts = { + create = "5m" + update = "5m" + delete = "5m" + } + + create_current_version_allowed_triggers = false + allowed_triggers = var.allowed_triggers + environment_variables = var.environment_variables + + create_package = false + local_existing_package = var.source_path + + tags = { + Name = replace(var.name, "_", "-") + } + + layers = var.layers + + trusted_entities = var.trusted_entities + attach_policy_json = var.attach_policy_json + policy_json = var.policy_json + + attach_policy_statements = var.attach_policy_statements + policy_statements = var.policy_statements +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/outputs.tf 
b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/outputs.tf new file mode 100644 index 000000000..3716bd331 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/outputs.tf @@ -0,0 +1,19 @@ +output "lambda_arn" { + value = module.lambda_function.lambda_function_arn +} + +output "lambda_role_arn" { + value = module.lambda_function.lambda_role_arn +} + +output "lambda_role_name" { + value = module.lambda_function.lambda_role_name +} + +output "metadata" { + value = { + lambda_invoke_arn = module.lambda_function.lambda_function_invoke_arn + authoriser_iam_role = module.lambda_function.lambda_role_arn + authoriser_name = var.lambda_name + } +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/vars.tf new file mode 100644 index 000000000..6303fca61 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_lambda/vars.tf @@ -0,0 +1,46 @@ +variable "name" {} + +variable "python_version" { +} + +variable "lambda_name" { + default = "" +} + +variable "layers" { + type = list(string) +} + +variable "source_path" {} + +variable "attach_policy_json" { + default = false +} + +variable "policy_json" { + default = "" +} + +variable "trusted_entities" { + default = [] +} + +variable "allowed_triggers" { + default = {} +} + +variable "environment_variables" { + default = {} +} + +variable "attach_policy_statements" { + default = false +} + +variable "policy_statements" { + default = {} +} + +variable "memory_size" { + default = 128 +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/layer.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/layer.tf new file mode 100644 index 000000000..17d7cdcff --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/layer.tf @@ -0,0 +1,24 @@ +module "lambda_layer" { + source = "terraform-aws-modules/lambda/aws" + version = "6.0.0" + + timeouts = { + create = "5m" + update = "5m" + delete = "5m" + } + + create_layer = true + + layer_name = var.layer_name + description = "${replace(var.name, "_", "-")} lambda layer" + compatible_runtimes = [var.python_version] + + create_package = false + local_existing_package = var.source_path + environment_variables = var.environment_variables + + tags = { + Name = replace(var.name, "_", "-") + } +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/outputs.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/outputs.tf new file mode 100644 index 000000000..a50986200 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/outputs.tf @@ -0,0 +1,7 @@ +output "layer_arn" { + value = module.lambda_layer.lambda_layer_arn +} + +output "name" { + value = var.name +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/vars.tf new file mode 100644 index 000000000..36c4b9ff4 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/api_worker/api_layer/vars.tf @@ -0,0 +1,11 @@ +variable "layer_name" {} + +variable "python_version" {} + +variable "name" {} + +variable "source_path" {} + +variable 
"environment_variables" { + default = {} +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/domain/acm.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/acm.tf new file mode 100644 index 000000000..c1c2be2e6 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/acm.tf @@ -0,0 +1,17 @@ +resource "aws_acm_certificate" "certificate" { + domain_name = var.domain + validation_method = "DNS" + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_acm_certificate_validation" "validation" { + certificate_arn = aws_acm_certificate.certificate.arn + validation_record_fqdns = [for record in aws_route53_record.route : record.fqdn] + depends_on = [ + aws_route53_record.route, + aws_acm_certificate.certificate + ] +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/domain/outputs.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/outputs.tf new file mode 100644 index 000000000..71f2771ca --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/outputs.tf @@ -0,0 +1,11 @@ +output "zone" { + value = var.zone +} + +output "domain" { + value = var.domain +} + +output "domain_cert" { + value = aws_acm_certificate.certificate.domain_name +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/domain/route53.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/route53.tf new file mode 100644 index 000000000..71688195f --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/route53.tf @@ -0,0 +1,58 @@ +data "aws_route53_zone" "zone" { + name = var.zone + private_zone = false +} + +resource "aws_route53_record" "route" { + for_each = { + for dvo in aws_acm_certificate.certificate.domain_validation_options : dvo.domain_name => { + name = dvo.resource_record_name + record = dvo.resource_record_value + type = dvo.resource_record_type + } + } + + allow_overwrite = true + name = each.value.name + records = [each.value.record] + ttl = 60 + type = each.value.type + zone_id = data.aws_route53_zone.zone.zone_id +} + + +resource "aws_api_gateway_domain_name" "domain" { + domain_name = aws_acm_certificate.certificate.domain_name + regional_certificate_arn = aws_acm_certificate_validation.validation.certificate_arn + security_policy = "TLS_1_2" + endpoint_configuration { + types = ["REGIONAL"] + } + + # mutual_tls_authentication { + # truststore_uri = "s3://${aws_s3_object.api_truststore.bucket}/${aws_s3_object.api_truststore.key}" + # truststore_version = aws_s3_object.api_truststore.version_id + # } + + lifecycle { + create_before_destroy = true + } + + depends_on = [ + aws_acm_certificate_validation.validation + ] +} + +resource "aws_route53_record" "cname" { + zone_id = data.aws_route53_zone.zone.id + name = var.domain + type = "CNAME" + ttl = "5" + records = [ + aws_api_gateway_domain_name.domain.regional_domain_name + ] + allow_overwrite = true + depends_on = [ + aws_acm_certificate.certificate + ] +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/modules/domain/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/vars.tf new file mode 100644 index 000000000..e19bd3cb9 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/modules/domain/vars.tf @@ -0,0 +1,5 @@ +variable "domain" { +} + +variable "zone" { +} diff --git a/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/main.tf 
b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/bucket_notification/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/bucket_notification/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/bucket_notification/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--bulk-transform-and-load.asl.json b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--bulk-transform-and-load.asl.json similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--bulk-transform-and-load.asl.json rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--bulk-transform-and-load.asl.json diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--update-transform-and-load.asl.json b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--update-transform-and-load.asl.json similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--update-transform-and-load.asl.json rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram--update-transform-and-load.asl.json diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram.asl.json b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram.asl.json similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram.asl.json rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-diagram.asl.json diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/locals.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/locals.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/locals.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/locals.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/output.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/output.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/output.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/vars.tf 
b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/etl-state-lock-enforcer/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/locals.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/locals.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/locals.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/locals.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/notify/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/notify/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/notify/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/output.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/notify/output.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/output.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/notify/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/notify/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/notify/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/output.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/output.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/output.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/schedule/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/schedule/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/schedule/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/step_function_role.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/step_function_role.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/step_function_role.tf rename to 
archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/step_function_role.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/trigger/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/output.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/trigger/output.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/output.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/trigger/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/trigger/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/vars.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/worker/main.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/main.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/worker/main.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/main.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/worker/output.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/output.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/worker/output.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/output.tf diff --git a/infrastructure/terraform/per_workspace/modules/etl/sds/worker/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/vars.tf similarity index 100% rename from infrastructure/terraform/per_workspace/modules/etl/sds/worker/vars.tf rename to archived_epr/infrastructure/terraform/per_workspace/modules/etl/sds/worker/vars.tf diff --git a/archived_epr/infrastructure/terraform/per_workspace/outputs.tf b/archived_epr/infrastructure/terraform/per_workspace/outputs.tf new file mode 100644 index 000000000..0ac8a5352 --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/outputs.tf @@ -0,0 +1,84 @@ +output "dynamodb_epr_table_name" { + value = module.eprtable.dynamodb_table_name +} + +output "dynamodb_cpm_table_name" { + value = module.cpmtable.dynamodb_table_name +} + +output "workspace" { + value = terraform.workspace +} + +output "workspace_type" { + value = local.workspace_type +} + +output "environment" { + value = var.environment +} + +output "invoke_url" { + value = module.api_entrypoint.invoke_url +} + +output "sds_etl" { + value = module.sds_etl +} + +output "manual_trigger_arn" { + value = module.sds_etl.manual_trigger_arn +} + +output "test_data_bucket" { + value = 
"${local.project}--${replace(var.account_name, "_", "-")}--test-data" +} + +output "certificate_domain_name" { + value = "https://${module.domain.domain_cert}" +} + +# output "assumed_role" { +# value = var.assume_role +# } + + +# output "layers_list" { +# value = var.layers +# } + +# output "layer_arns_object" { +# value = { +# for key, instance in module.layers : key => instance.layer_arn +# } +# } + +# output "layer_arns_array" { +# value = [ +# for instance in module.layers : instance.layer_arn +# ] +# } + +# output "lambda_list" { +# value = var.lambdas +# } + +# output "lambda_arns_object" { +# value = { +# for key, instance in module.lambdas : key => instance.lambda_arn +# } +# } + +# output "lambda_arns_array" { +# value = [ +# for instance in module.lambdas : instance.lambda_arn +# ] +# } + +# output "auth_lambda_arn" { +# value = module.authoriser.lambda_arn +# } + +# output "auth_lambda_role_arn" { +# value = module.authoriser.lambda_role_arn +# } diff --git a/archived_epr/infrastructure/terraform/per_workspace/provider.tf b/archived_epr/infrastructure/terraform/per_workspace/provider.tf new file mode 100644 index 000000000..a171cdf2a --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/provider.tf @@ -0,0 +1,20 @@ +provider "aws" { + region = local.region + + assume_role { + role_arn = "arn:aws:iam::${var.assume_account}:role/${var.assume_role}" + } + + default_tags { + tags = { + Environment = var.environment + Workspace = replace(terraform.workspace, "_", "-") + Project = local.project + Name = "${local.project}--${replace(terraform.workspace, "_", "-")}" + Owner = "NHSE" + ProjectShortName = "CPM" + ProjectFullname = "Connecting Party Manager" + WorkspaceType = var.workspace_type + } + } +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/terraform.tf b/archived_epr/infrastructure/terraform/per_workspace/terraform.tf new file mode 100644 index 000000000..7a5d8b32a --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/terraform.tf @@ -0,0 +1,18 @@ +terraform { + backend "s3" { + encrypt = false + region = "eu-west-2" + bucket = "nhse-cpm--terraform-state-v1.0.0" + dynamodb_table = "nhse-cpm--terraform-state-lock-v1.0.0" + key = "terraform-state-infrastructure" + workspace_key_prefix = "nhse-cpm" + } + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5.26.0" + } + } + # required_version = ">= 1.1.0" +} diff --git a/archived_epr/infrastructure/terraform/per_workspace/vars.tf b/archived_epr/infrastructure/terraform/per_workspace/vars.tf new file mode 100644 index 000000000..e545dcd2e --- /dev/null +++ b/archived_epr/infrastructure/terraform/per_workspace/vars.tf @@ -0,0 +1,72 @@ +variable "account_name" { + type = string +} + +variable "assume_account" { + sensitive = true +} + +variable "assume_role" {} + +variable "environment" {} + +variable "deletion_protection_enabled" { + type = bool + default = false +} + +variable "expiration_date" { + default = "NEVER" +} + +variable "updated_date" { + default = "NEVER" +} + +variable "layers_directory" { + default = "../src/layers" +} + +variable "layers" { + type = list(string) +} + +variable "third_party_layers" { + type = list(string) +} + +variable "api_lambda_layers" { + type = list(string) + default = [ + "domain", + "event", + "api_utils", + "sds" + ] +} + +variable "lambdas" { + type = list(string) +} + +variable "workspace_type" { + type = string + default = "PERSISTENT" +} + +variable "apigateway_arn_prefix" { + type = string + default = 
"arn:aws:apigateway:eu-west-2:lambda:path/2015-03-31/functions/arn:aws:lambda:eu-west-2" +} + +variable "python_version" { + default = "python3.12" +} + +variable "domain" { + type = string +} + +variable "lambda_memory_size" { + default = 128 +} diff --git a/scripts/etl/clear_state_inputs.py b/archived_epr/scripts/etl/clear_state_inputs.py similarity index 100% rename from scripts/etl/clear_state_inputs.py rename to archived_epr/scripts/etl/clear_state_inputs.py index 314f4cd43..328c20729 100644 --- a/scripts/etl/clear_state_inputs.py +++ b/archived_epr/scripts/etl/clear_state_inputs.py @@ -8,10 +8,10 @@ from collections import deque import boto3 +from etl.sds.tests.etl_test_utils.etl_state import _delete_objects_by_prefix from etl_utils.constants import CHANGELOG_NUMBER, WorkerKey from etl_utils.io import pkl_dumps_lz4 -from etl.sds.tests.etl_test_utils.etl_state import _delete_objects_by_prefix from test_helpers.aws_session import aws_session from test_helpers.terraform import read_terraform_output diff --git a/scripts/etl/decode_load.py b/archived_epr/scripts/etl/decode_load.py similarity index 100% rename from scripts/etl/decode_load.py rename to archived_epr/scripts/etl/decode_load.py diff --git a/scripts/etl/etl.mk b/archived_epr/scripts/etl/etl.mk similarity index 100% rename from scripts/etl/etl.mk rename to archived_epr/scripts/etl/etl.mk diff --git a/scripts/etl/head_etl.py b/archived_epr/scripts/etl/head_etl.py similarity index 100% rename from scripts/etl/head_etl.py rename to archived_epr/scripts/etl/head_etl.py diff --git a/scripts/etl/ldif_cleanup.py b/archived_epr/scripts/etl/ldif_cleanup.py similarity index 100% rename from scripts/etl/ldif_cleanup.py rename to archived_epr/scripts/etl/ldif_cleanup.py diff --git a/src/api/tests/sds_data_tests/calculation.py b/archived_epr/src_old/api/tests/sds_data_tests/calculation.py similarity index 100% rename from src/api/tests/sds_data_tests/calculation.py rename to archived_epr/src_old/api/tests/sds_data_tests/calculation.py diff --git a/src/api/tests/sds_data_tests/conftest.py b/archived_epr/src_old/api/tests/sds_data_tests/conftest.py similarity index 100% rename from src/api/tests/sds_data_tests/conftest.py rename to archived_epr/src_old/api/tests/sds_data_tests/conftest.py diff --git a/src/api/tests/sds_data_tests/data/sds_fhir_api.failed_queries.device.json b/archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.failed_queries.device.json similarity index 100% rename from src/api/tests/sds_data_tests/data/sds_fhir_api.failed_queries.device.json rename to archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.failed_queries.device.json diff --git a/src/api/tests/sds_data_tests/data/sds_fhir_api.speed_test_queries.device.json b/archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.speed_test_queries.device.json similarity index 100% rename from src/api/tests/sds_data_tests/data/sds_fhir_api.speed_test_queries.device.json rename to archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.speed_test_queries.device.json diff --git a/src/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.device.json b/archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.device.json similarity index 100% rename from src/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.device.json rename to archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.device.json diff --git a/src/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.endpoint.json 
b/archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.endpoint.json similarity index 100% rename from src/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.endpoint.json rename to archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_api.unique_queries.endpoint.json diff --git a/src/api/tests/sds_data_tests/data/sds_fhir_queries_errors.json b/archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_queries_errors.json similarity index 100% rename from src/api/tests/sds_data_tests/data/sds_fhir_queries_errors.json rename to archived_epr/src_old/api/tests/sds_data_tests/data/sds_fhir_queries_errors.json diff --git a/src/api/tests/sds_data_tests/run_requests.sh b/archived_epr/src_old/api/tests/sds_data_tests/run_requests.sh similarity index 100% rename from src/api/tests/sds_data_tests/run_requests.sh rename to archived_epr/src_old/api/tests/sds_data_tests/run_requests.sh diff --git a/src/api/tests/sds_data_tests/test_sds_data.py b/archived_epr/src_old/api/tests/sds_data_tests/test_sds_data.py similarity index 100% rename from src/api/tests/sds_data_tests/test_sds_data.py rename to archived_epr/src_old/api/tests/sds_data_tests/test_sds_data.py diff --git a/archived_epr/src_old/conftest.py b/archived_epr/src_old/conftest.py new file mode 100644 index 000000000..dddf361e3 --- /dev/null +++ b/archived_epr/src_old/conftest.py @@ -0,0 +1,179 @@ +import json +import time +from pathlib import Path + +import boto3 +from etl_utils.constants import ETL_STATE_LOCK +from event.aws.client import dynamodb_client +from event.logging.logger import setup_logger +from nhs_context_logging.fixtures import ( # noqa: F401 + log_capture_fixture as log_capture, +) +from nhs_context_logging.fixtures import ( # noqa: F401 + log_capture_global_fixture as log_capture_global, +) +from nhs_context_logging.formatters import json_serializer +from pytest import Config, FixtureRequest, Item, Parser, fixture + +from test_helpers.aws_session import aws_session +from test_helpers.constants import PROJECT_ROOT +from test_helpers.dynamodb import clear_dynamodb_table +from test_helpers.terraform import read_terraform_output + + +def pytest_addoption(parser: Parser): + parser.addoption("--suppress-logs", action="store", default=False) + + +def is_integration(request: FixtureRequest) -> bool: + return request.node.get_closest_marker("integration") is not None + + +def is_smoke(request: FixtureRequest) -> bool: + return request.node.get_closest_marker("smoke") is not None + + +def is_s3(request: FixtureRequest) -> bool: + return request.node.get_closest_marker("s3") is not None + + +def is_matrix(request: FixtureRequest) -> bool: + return request.node.get_closest_marker("matrix") is not None + + +def dynamodb_client_with_sleep(): + """ + Since we use GSIs we need to give the GSI projections time + to sync with the root table (GSIs do not support strongly + consistent reads). We have implemented sleeps: + + * after 'write' operations. + * before 'query' operations.
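(Editor's note: a minimal sketch of the read-after-write gap these sleeps paper over, using the patched client defined just below; the table and index names are hypothetical:)

    client = dynamodb_client_with_sleep()
    client.transact_write_items(TransactItems=[...])  # patched: sleeps 0.5s after the write
    client.query(                                     # patched: sleeps 0.2s before the query
        TableName="hypothetical-table",
        IndexName="hypothetical-gsi",  # GSI projections lag the root table
        KeyConditionExpression="pk = :pk",
        ExpressionAttributeValues={":pk": {"S": "example"}},
    )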
+ """ + client = dynamodb_client() + unpatched_transact_write_items = client.transact_write_items + unpatched_query = client.query + + def _transact_write_items_with_sleep(*args, **kwargs): + response = unpatched_transact_write_items(*args, **kwargs) + time.sleep(0.5) + return response + + def _query_with_sleep(*args, **kwargs): + time.sleep(0.2) + response = unpatched_query(*args, **kwargs) + return response + + client.transact_write_items = _transact_write_items_with_sleep + client.query = _query_with_sleep + return client + + +def download_files_from_s3(request: FixtureRequest): + client = boto3.client("s3") + test_data_bucket = read_terraform_output("test_data_bucket.value") + s3_paths = [] + for key in request.node.get_closest_marker("s3").args: + download_path = PROJECT_ROOT / ".downloads" / Path(key) + s3_paths.append(download_path) + if download_path.exists(): + continue + download_path.parent.mkdir(parents=True, exist_ok=True) + client.download_file( + Bucket=test_data_bucket, Key=key, Filename=str(download_path) + ) + return s3_paths + + +def pytest_collection_modifyitems(items: list[Item], config: Config): + """Add 'unit' marker to unmarked tests""" + custom_markers = config._getini(("markers")) + hypothesis_marker_idx = custom_markers.index( + "hypothesis: Tests which use hypothesis." + ) + custom_markers = custom_markers[:hypothesis_marker_idx] + for item in items: + unmarked_test = True + for marker in item.iter_markers(): + if marker.name in custom_markers: + unmarked_test = False + break + if unmarked_test: + item.add_marker("unit") + + +@fixture(autouse=True) +def log_on_failure(pytestconfig: Config, request: FixtureRequest, log_capture): + setup_logger(request.node.name) + + if pytestconfig.getoption("suppress_logs") is not False: + from nhs_context_logging import app_logger + + app_logger.log = lambda *args, **kwargs: None + + exception = None + try: + yield + except Exception as exception: + pass + + std_out, std_err = log_capture + for log in (*std_out, *std_err): + if pytestconfig.getoption("suppress_logs") is False: + serialised = json_serializer(log) + print(json.dumps(serialised, indent=2)) # noqa: T201 + + if isinstance(exception, Exception): + raise exception + + +@fixture(autouse=True) +def aws_session_(request: FixtureRequest): + if is_integration(request): + with aws_session(): + yield + elif is_smoke(request): + with aws_session(role_name="NHSSmokeTestRole"): + yield + else: + yield + + +@fixture(autouse=True) +def clear_dynamodb_table_(request: FixtureRequest): + if is_integration(request): + client = dynamodb_client() + table_name_cpm = read_terraform_output("dynamodb_cpm_table_name.value") + table_name_epr = read_terraform_output("dynamodb_epr_table_name.value") + clear_dynamodb_table(client=client, table_name=table_name_cpm) + clear_dynamodb_table(client=client, table_name=table_name_epr) + yield + else: + yield + + +@fixture(autouse=True) +def clear_etl_state_lock_(request: FixtureRequest): + if is_integration(request): + s3_client = boto3.client("s3") + bucket_name = read_terraform_output("sds_etl.value.bucket") + s3_client.delete_object(Bucket=bucket_name, Key=ETL_STATE_LOCK) + yield + else: + yield + + +@fixture(autouse=True) +def test_data_paths(request: FixtureRequest): + """ + Returns local paths to downloaded s3 files. 
This complements the marker + + 'pytest.mark.s3("path/to/file/in/test_bucket", "path/to/other/file/in/test_bucket")' + + which this fixture hooks to via 'is_s3' + """ + if is_s3(request): + with aws_session(): + paths = download_files_from_s3(request) + yield paths + else: + yield diff --git a/archived_epr/src_old/layers/domain/core/aggregate_root.py b/archived_epr/src_old/layers/domain/core/aggregate_root.py new file mode 100644 index 000000000..d52c79992 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/aggregate_root.py @@ -0,0 +1,150 @@ +import datetime +from collections.abc import Callable +from functools import wraps +from typing import Protocol + +import orjson +from attr import asdict +from domain.core.error import EventUpdatedError, ImmutableFieldError, UnknownFields +from domain.core.timestamp import now +from pydantic import Field, validate_model + +from .base import BaseModel +from .event import Event, ExportedEventsTypeDef + +UPDATED_ON = "updated_on" + + +def _validate_model(model, input_data): + """ + Shallow wrapper around pydantic's 'validate_model' to raise an error + if one was discovered during validation of the input_data against the model + """ + _, _, error = validate_model(model=model, input_data=input_data) + if error is not None: + raise error + + +class AggregateRoot(BaseModel): + """ + Entities in the domain are arranged as collections, known as Aggregates, and + one object in that collection is called the Aggregate Root. The Aggregate + Root object owns all the other objects in the aggregate. Deleting the root + deletes them. + + e.g. Products include relationships and keys. + Product + +-- Relationship + +-- ProductKey + + Amendments to the AggregateRoot will result in events being added to an + internal `events` property + """ + + class Config: + """ + Events are not pydantic classes + """ + + arbitrary_types_allowed = True + + events: list[Event] = Field(default_factory=list, exclude=True) + + def add_event(self, event: Event) -> Event: + """ + Add an event to the internal queue + """ + self.events.append(event) + return event + + def clear_events(self): + """ + Clear events stored on the AR once they've been processed (e.g. passed + to the event bus or for testing purposes) + """ + self.events.clear() + + def export_events(self) -> ExportedEventsTypeDef: + """ + Export events in a form that is independent of the domain. The form is: + + [ + {"event_name_in_snake_case": {"event_data": "event_value", ...}}, + ... + ] + + for example: + + [ + {"device_created_event": {"id": "123", ...}}, + {"device_key_added_event": {"key_type": "asid", ...}}, + ...
+ ] + + """ + return list( + {event.public_name: asdict(event, recurse=False)} for event in self.events + ) + + @property + def model_fields(self) -> set[str]: + return set( + field_name + for field_name in self.__fields__ + if self.__fields__[field_name].field_info.exclude is not True + ) + + @property + def immutable_fields(self) -> set[str]: + return set( + field_name + for field_name in self.__fields__ + if self.__fields__[field_name].field_info.extra.get("immutable") is True + ) + + def _update[K, V](self, data: dict[K, V]) -> dict[K, V]: + fields_to_update = set(data) + unknown_fields = fields_to_update - self.model_fields + if unknown_fields: + raise UnknownFields(", ".join(unknown_fields)) + + immutable_fields = fields_to_update.intersection(self.immutable_fields) + if immutable_fields: + raise ImmutableFieldError(", ".join(immutable_fields)) + + _data = self.dict() + _data.update(data) + _validate_model(model=self.__class__, input_data=_data) + + for field, value in data.items(): + setattr(self, field, value) + return _data + + def state(self) -> dict: + """Returns a deepcopy, useful for bulk operations rather than dealing with events""" + return orjson.loads(self.json()) + + +class Updatable(Protocol): + updated_on: datetime + + +def _set_updated_on(updatable: Updatable, event: "Event"): + if not hasattr(event, UPDATED_ON): + raise EventUpdatedError( + f"All returned events must have attribute '{UPDATED_ON}'" + ) + updated_on = getattr(event, UPDATED_ON) or now() + setattr(event, UPDATED_ON, updated_on) + updatable.updated_on = updated_on + + +def event[RT, **P](fn: Callable[P, RT]) -> Callable[P, RT]: + @wraps(fn) + def wrapper(self: AggregateRoot, *args: P.args, **kwargs: P.kwargs) -> RT: + _event = fn(self, *args, **kwargs) + self.add_event(_event) + _set_updated_on(updatable=self, event=_event) + return _event + + return wrapper diff --git a/archived_epr/src_old/layers/domain/core/base.py b/archived_epr/src_old/layers/domain/core/base.py new file mode 100644 index 000000000..263a40243 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/base.py @@ -0,0 +1,28 @@ +from types import FunctionType + +import orjson +from pydantic import BaseModel as _BaseModel + + +def orjson_dumps(v, *, default): + # orjson.dumps returns bytes, to match standard json.dumps we need to decode + return orjson.dumps(v, default=default).decode() + + +class BaseModel(_BaseModel): + class Config: + arbitrary_types_allowed = True + json_encoders = { + set: list, + FunctionType: lambda fn: fn.__name__, + type: lambda _type: _type.__name__, + } + json_dumps = orjson_dumps + + @classmethod + def get_all_fields(cls) -> set[str]: + return set(cls.__fields__.keys()) + + @classmethod + def get_mandatory_fields(cls) -> set[str]: + return set(f.name for f in cls.__fields__.values() if f.required) diff --git a/src/layers/domain/core/epr_product/__init__.py b/archived_epr/src_old/layers/domain/core/cpm_product/__init__.py similarity index 100% rename from src/layers/domain/core/epr_product/__init__.py rename to archived_epr/src_old/layers/domain/core/cpm_product/__init__.py diff --git a/archived_epr/src_old/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py b/archived_epr/src_old/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py new file mode 100644 index 000000000..22193fa98 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py @@ -0,0 +1,43 @@ +from datetime import datetime + +import pytest +from domain.core.cpm_product import CpmProduct 
+from domain.core.cpm_system_id import ProductId + + +@pytest.fixture +def cpm_product(): + product = CpmProduct( + name="Foo", + ods_code="ABC123", + product_team_id="ABC123.18934119-5780-4d28-b9be-0e6dff3908ba", + ) + return product + + +def test_cpm_product_created(cpm_product: CpmProduct): + assert isinstance(cpm_product.created_on, datetime) + assert isinstance(cpm_product.id, ProductId) + assert isinstance(cpm_product.name, str) + assert isinstance(cpm_product.ods_code, str) + assert isinstance(cpm_product.product_team_id, str) + assert isinstance(cpm_product.created_on, datetime) + + +@pytest.mark.parametrize( + "invalid_product_id", + [ + "P.111-XXX", # Contains invalid characters + "P.AAA.AAA", # Uses '.' instead of '-' as the separator + "P.AC-33A", # Not enough characters + "P.ACCC-33A", # Too many characters + ], +) +def test_invalid_product_id(invalid_product_id): + with pytest.raises(ValueError): + CpmProduct( + id=invalid_product_id, + name="Foo", + ods_code="ABC123", + product_team_id="ABC123.18934119-5780-4d28-b9be-0e6dff3908ba", + ) diff --git a/archived_epr/src_old/layers/domain/core/cpm_product/v1.py b/archived_epr/src_old/layers/domain/core/cpm_product/v1.py new file mode 100644 index 000000000..07e07f272 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/cpm_product/v1.py @@ -0,0 +1,98 @@ +from datetime import datetime + +from attr import dataclass +from domain.core.aggregate_root import AggregateRoot +from domain.core.cpm_system_id import ProductId +from domain.core.device import UPDATED_ON, event +from domain.core.enum import Status +from domain.core.error import DuplicateError +from domain.core.event import Event, EventDeserializer +from domain.core.product_key import ProductKey +from domain.core.timestamp import now +from domain.core.validation import CPM_PRODUCT_NAME_REGEX +from pydantic import Field + + +@dataclass(kw_only=True, slots=True) +class CpmProductCreatedEvent(Event): + id: str + product_team_id: str + name: str + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + + +@dataclass(kw_only=True, slots=True) +class CpmProductKeyAddedEvent(Event): + new_key: dict + id: str + product_team_id: str + name: str + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[dict] + + +@dataclass(kw_only=True, slots=True) +class CpmProductDeletedEvent(Event): + id: str + product_team_id: str + name: str + ods_code: str + status: Status + created_on: str + updated_on: str + deleted_on: str + keys: list[ProductKey] + + +class CpmProduct(AggregateRoot): + """ + A product in the database. + """ + + id: ProductId = Field(default_factory=ProductId.create) + product_team_id: str = Field(...) 
+ name: str = Field(regex=CPM_PRODUCT_NAME_REGEX, min_length=1) + ods_code: str + status: Status = Status.ACTIVE + created_on: datetime = Field(default_factory=now, immutable=True) + updated_on: datetime = Field(default=None) + deleted_on: datetime = Field(default=None) + keys: list[ProductKey] = Field(default_factory=list) + + @event + def add_key(self, key_type: str, key_value: str) -> CpmProductKeyAddedEvent: + product_key = ProductKey(key_value=key_value, key_type=key_type) + if product_key in self.keys: + raise DuplicateError( + f"It is forbidden to supply duplicate keys: '{key_type}':'{key_value}'" + ) + self.keys.append(product_key) + product_data = self.state() + product_data.pop(UPDATED_ON) # The @event decorator will handle updated_on + return CpmProductKeyAddedEvent(new_key=product_key.dict(), **product_data) + + @event + def delete(self): + deleted_on = now() + product_data = self._update( + data=dict( + status=Status.INACTIVE, updated_on=deleted_on, deleted_on=deleted_on + ) + ) + return CpmProductDeletedEvent(**product_data) + + +class CpmProductEventDeserializer(EventDeserializer): + event_types = ( + CpmProductCreatedEvent, + CpmProductKeyAddedEvent, + CpmProductDeletedEvent, + ) diff --git a/src/layers/domain/core/product_team_epr/__init__.py b/archived_epr/src_old/layers/domain/core/cpm_system_id/__init__.py similarity index 100% rename from src/layers/domain/core/product_team_epr/__init__.py rename to archived_epr/src_old/layers/domain/core/cpm_system_id/__init__.py diff --git a/archived_epr/src_old/layers/domain/core/cpm_system_id/generated_ids/product_ids.json b/archived_epr/src_old/layers/domain/core/cpm_system_id/generated_ids/product_ids.json new file mode 100644 index 000000000..e8e790501 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/cpm_system_id/generated_ids/product_ids.json @@ -0,0 +1,202 @@ +[ + "P.PV9-GTU", + "P.G9T-UAR", + "P.EGF-DHU", + "P.7GX-ANH", + "P.YVT-UPU", + "P.CUD-97R", + "P.RWV-7NY", + "P.DYD-3RG", + "P.66R-VWJ", + "P.9AK-LMH", + "P.4MK-XWK", + "P.4CU-UKP", + "P.T4H-X3P", + "P.WW7-FCD", + "P.97J-HN9", + "P.F9V-6JK", + "P.L97-V4V", + "P.4NT-EGL", + "P.3KL-X6F", + "P.WUD-WAN", + "P.LMA-FVM", + "P.3DR-EK3", + "P.4HC-WJN", + "P.UVV-XVF", + "P.HUX-WFD", + "P.TK6-6Y9", + "P.MNK-MYL", + "P.K96-UND", + "P.U6V-KGF", + "P.PXT-WJL", + "P.CXW-CRT", + "P.HWC-GLY", + "P.3P9-4RU", + "P.MTD-AF7", + "P.C6E-334", + "P.H6K-7VL", + "P.RVX-E7T", + "P.AY3-ERX", + "P.HDV-PGC", + "P.FKR-LH9", + "P.HTJ-YFF", + "P.HXU-LKP", + "P.CHV-GRC", + "P.HR9-9PU", + "P.HPM-9MC", + "P.3FL-PTA", + "P.JFD-9RA", + "P.GV7-FPE", + "P.VRM-PHG", + "P.3WF-CNA", + "P.4RN-VLN", + "P.464-URD", + "P.HM4-XAM", + "P.L6D-XK7", + "P.N6T-MHN", + "P.6YL-D93", + "P.4KJ-DHT", + "P.GT4-9DY", + "P.A4C-EKV", + "P.6TA-Y7V", + "P.HDJ-RXK", + "P.NT9-M9A", + "P.YUP-T9J", + "P.EJK-ACK", + "P.JG7-LCA", + "P.AKM-MLU", + "P.TVU-MVK", + "P.GHK-HPN", + "P.PPW-73K", + "P.P4F-9X9", + "P.4R6-YXX", + "P.LRN-UWP", + "P.P4P-DXM", + "P.H9X-PD4", + "P.U6N-CPC", + "P.L9N-Y3T", + "P.DUM-YKT", + "P.KLF-RUP", + "P.JDN-3M6", + "P.WTJ-FJT", + "P.6AK-96A", + "P.AEP-C4V", + "P.LFE-RCF", + "P.YY4-J46", + "P.U6J-EEU", + "P.FRM-FGP", + "P.9KP-MUR", + "P.VKF-LUV", + "P.JVG-CA4", + "P.MNE-V6V", + "P.RK9-CKM", + "P.9RJ-VTW", + "P.C9W-CLJ", + "P.74C-9DD", + "P.WFK-V63", + "P.UE4-CDG", + "P.YEU-NPF", + "P.YPP-NTU", + "P.9MX-PWE", + "P.HFC-YXE", + "P.K6N-D4X", + "P.NMX-MHC", + "P.YMA-6HR", + "P.ACG-D7G", + "P.VRA-F9U", + "P.KG3-CHW", + "P.3RN-LKG", + "P.DXE-FWE", + "P.T46-YEE", + "P.LWW-GFN", + "P.MLH-97C", + "P.LLT-HYM", + 
"P.4VH-YLN", + "P.RVC-APA", + "P.JNH-MWV", + "P.JV6-H4R", + "P.6E9-R3R", + "P.CRM-VWR", + "P.Y9N-4CX", + "P.6DP-3MK", + "P.DWR-VM6", + "P.J9E-DCX", + "P.K7C-X7D", + "P.EWX-ACY", + "P.JFX-3M4", + "P.E9J-MWP", + "P.HKM-CPR", + "P.JD3-X73", + "P.AM9-GKE", + "P.EN3-UFR", + "P.7LY-KF7", + "P.7AG-7KN", + "P.PEH-UFM", + "P.YLU-RM4", + "P.9XP-NX6", + "P.EC3-776", + "P.64E-T3A", + "P.THY-GVT", + "P.HKM-MGU", + "P.XTW-LG3", + "P.C7C-TXK", + "P.HWL-JE7", + "P.HCV-TTE", + "P.9RW-9FX", + "P.XWA-VFF", + "P.JVH-AWX", + "P.CPY-J4W", + "P.7JR-XAP", + "P.KM7-439", + "P.CTC-UEM", + "P.DUG-WPW", + "P.DKM-L96", + "P.HDT-EAG", + "P.U9F-D3X", + "P.6KC-WPU", + "P.CCW-G73", + "P.DGT-RFV", + "P.TNG-GXH", + "P.TUR-X4P", + "P.RDD-M6U", + "P.EKF-XRK", + "P.AVN-DN7", + "P.R6V-EVL", + "P.K4P-RHH", + "P.YEU-DLE", + "P.JUW-CRP", + "P.JPT-6AY", + "P.XMX-4CN", + "P.PLT-9YV", + "P.KFH-XGK", + "P.PGF-6N9", + "P.PDG-7HT", + "P.TW7-T6H", + "P.D3Y-H7J", + "P.4H4-4FJ", + "P.CPH-FRH", + "P.7JD-MUC", + "P.4RE-CKC", + "P.3GG-DW6", + "P.M3H-7VM", + "P.LYL-R7V", + "P.PEL-CE3", + "P.46W-MHJ", + "P.RKK-P46", + "P.AE9-96P", + "P.DEH-9YY", + "P.6LF-D3M", + "P.ULK-K93", + "P.D3C-YW7", + "P.CVM-E4J", + "P.ETY-HVD", + "P.3H3-J34", + "P.NDG-Y9U", + "P.49J-A7U", + "P.RYF-MMM", + "P.NXD-R7N", + "P.F7G-GVH", + "P.9CY-HPV", + "P.GM9-G6H", + "P.WPN-N9R" +] diff --git a/archived_epr/src_old/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py b/archived_epr/src_old/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py new file mode 100644 index 000000000..3bbd5739c --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py @@ -0,0 +1,153 @@ +import os +from pathlib import Path + +import pytest +from domain.core.cpm_system_id import ( + PRODUCT_TEAM_EPR_ID_PATTERN, + PRODUCT_TEAM_ID_PATTERN, + AsidId, + PartyKeyId, + ProductId, + ProductTeamId, +) +from event.json import json_load + +PATH_TO_CPM_SYSTEM_IDS = Path(__file__).parent.parent +PRODUCT_IDS_GENERATED_FILE = f"{PATH_TO_CPM_SYSTEM_IDS}/generated_ids/product_ids.json" +generated_product_ids = set() + + +@pytest.fixture(scope="module") +def _get_generated_ids(): + global generated_product_ids + if os.path.exists(PRODUCT_IDS_GENERATED_FILE): + with open(PRODUCT_IDS_GENERATED_FILE, "r") as file: + generated_product_ids = set(json_load(file)) + + +def test_cpm_product_team_id_generator(): + generator = ProductTeamId.create() + assert PRODUCT_TEAM_ID_PATTERN.match(generator.id) + + +def test_epr_product_team_id_generator(): + generator = ProductTeamId.create(ods_code="ABC") + assert PRODUCT_TEAM_EPR_ID_PATTERN.match(generator.id) + + +def test_cpm_product_tema_id_validate_id_valid(): + valid_key = "3150ac97-45d0-40f6-904f-c6422c46e711" + is_valid = ProductTeamId.validate(valid_key) + assert is_valid + + +def test_epr_product_tema_id_validate_id_valid(): + valid_key = "ABC.3150ac97-45d0-40f6-904f-c6422c46e711" + is_valid = ProductTeamId.validate(valid_key) + assert is_valid + + +def test_party_key_generator_format_key(): + generator = PartyKeyId.create(current_number=123456, ods_code="ABC") + expected_key = "ABC-123457" # Expecting the number to be formatted with 6 digits + assert generator.id == expected_key + + +def test_party_key_generator_validate_key_valid(): + valid_key = "ABC-123457" + is_valid = PartyKeyId.validate_cpm_system_id(valid_key) + assert is_valid + + +@pytest.mark.parametrize( + "invalid_key", + [ + "ABC000124", # Missing hyphen + "ABC-1234", # Number part too short + "ABC-123456789101112", # Number part too long + 
"ABC-0001A4", # Number part contains a non-digit character + "", # Empty string + ], +) +def test_party_key_generator_validate_key_invalid_format(invalid_key): + is_valid = PartyKeyId.validate_cpm_system_id(invalid_key) + assert not is_valid + + +def test_party_key_generator_increment_number(): + # Test that the number is incremented correctly + generator = PartyKeyId.create(current_number=123456, ods_code="XYZ") + expected_key = "XYZ-123457" # Expecting increment from 123456 to 123457 + assert generator.id == expected_key + + +def test_asid_generator_validate_key_valid(): + valid_key = "223456789014" + is_valid = AsidId.validate_cpm_system_id(valid_key) + assert is_valid + + +@pytest.mark.parametrize( + "invalid_key", + [ + "1234567890123", + "12345678901", + "1234567890", + "123456789", + "12345678", + "1234567", + "123456", + "12345", + "1234", + "123", + "12", + "1", + "", # Empty string + ], +) +def test_asid_generator_validate_key_invalid_format(invalid_key): + is_valid = AsidId.validate_cpm_system_id(invalid_key) + assert not is_valid + + +def test_asid_generator_increment_number(): + # Test that the number is incremented correctly + generator = AsidId.create(current_number=223456789012) + assert generator.id == "223456789013" + + +@pytest.mark.repeat(50) +def test_product_id_generator_format_key(_get_generated_ids): + generator = ProductId.create() + assert generator.id is not None + assert generator.id not in generated_product_ids + + +@pytest.mark.parametrize( + "valid_key", + [ + "P.AAA-333", + "P.AC3-333", + "P.ACC-33A", + ], +) +def test_product_id_generator_validate_key_valid(valid_key): + is_valid = ProductId.validate_cpm_system_id(cpm_system_id=valid_key) + assert is_valid + + +@pytest.mark.parametrize( + "invalid_key", + [ + "P.BBB-111", # Invalid characters + "AAC346", # Missing 'P.' 
and hyphen + "P-ACD-333", # Extra hyphen + "P.ACC344", # Missing hyphen + "P.ACC-3467", # Too many digits + "P.AAC-34", # Too few digits + "", # Empty string + ], +) +def test_product_id_generator_validate_key_invalid_format(invalid_key): + is_valid = ProductId.validate_cpm_system_id(cpm_system_id=invalid_key) + assert not is_valid diff --git a/archived_epr/src_old/layers/domain/core/cpm_system_id/v1.py b/archived_epr/src_old/layers/domain/core/cpm_system_id/v1.py new file mode 100644 index 000000000..0eeed8bb6 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/cpm_system_id/v1.py @@ -0,0 +1,158 @@ +import os +import random +import re +from abc import ABC, abstractmethod +from datetime import datetime +from functools import cache +from pathlib import Path +from uuid import uuid4 + +from domain.core.base import BaseModel +from domain.core.device_key import validate_key +from domain.core.error import InvalidKeyPattern +from domain.core.product_key import ProductKeyType +from event.json import json_load +from pydantic import validator + +FIRST_ASID = 200000099999 +FIRST_PARTY_KEY = 849999 + +PRODUCT_ID_PART_LENGTH = 3 +PRODUCT_ID_NUMBER_OF_PARTS: int = 2 +PRODUCT_ID_VALID_CHARS = "ACDEFGHJKLMNPRTUVWXY34679" # pragma: allowlist secret +PRODUCT_ID_PATTERN = re.compile( + rf"^P\.[{PRODUCT_ID_VALID_CHARS}]{{{PRODUCT_ID_PART_LENGTH}}}-[{PRODUCT_ID_VALID_CHARS}]{{{PRODUCT_ID_PART_LENGTH}}}$" +) + +PATH_TO_CPM_SYSTEM_IDS = Path(__file__).parent +PRODUCT_IDS_GENERATED_FILE = f"{PATH_TO_CPM_SYSTEM_IDS}/generated_ids/product_ids.json" +PRODUCT_TEAM_EPR_ID_PATTERN = re.compile( + r"^[a-zA-Z0-9]+\.([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})$" +) +PRODUCT_TEAM_ID_PATTERN = re.compile( + r"^([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})$" +) + + +@cache +def _load_existing_ids(): + if os.path.exists(PRODUCT_IDS_GENERATED_FILE): + with open(PRODUCT_IDS_GENERATED_FILE, "r") as file: + return set(json_load(file)) + return set() + + +class CpmSystemId(BaseModel, ABC): + __root__: str = None + + @classmethod + @abstractmethod + def create(cls, current_number=None, **kwargs): + """Create a new instance of the ID.""" + pass + + @classmethod + @abstractmethod + def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: + """Validate the key format.""" + pass + + @property + def id(self): + return self.__root__ + + @validator("__root__") + def validate_root(cls, cpm_system_id): + if not cls.validate_cpm_system_id(cpm_system_id): + raise ValueError("Invalid cpm system id provided") + + return cpm_system_id + + def __str__(self): + return self.id + + +class AsidId(CpmSystemId): + + @classmethod + def create(cls, current_number: int): + current_number = current_number or FIRST_ASID + latest_number = current_number + 1 + return cls(__root__=f"{latest_number:012d}") + + @classmethod + def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: + """Validate that the ASID has the correct format.""" + return ( + cpm_system_id.isdigit() + and len(cpm_system_id) == 12 + and cpm_system_id.startswith("2") + ) + + @property + def latest_number(self): + if self.id: + return int(self.id) + + +class PartyKeyId(CpmSystemId): + + @classmethod + def create(cls, current_number: int, ods_code: str): + current_number = current_number or FIRST_PARTY_KEY + latest_number = current_number + 1 + return cls( + __root__=f"{ods_code}-{latest_number:06d}", + ) + + @classmethod + def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: + """Validate that the party 
key has the correct format.""" + try: + validate_key(key_value=cpm_system_id, key_type=ProductKeyType.PARTY_KEY) + except InvalidKeyPattern: + return False + return True + + @property + def latest_number(self): + if self.id: + return int(self.id.split("-")[1]) + + +class ProductId(CpmSystemId): + @classmethod + def create(cls): + """No current_id needed, key is generated randomly.""" + rng = random.Random(datetime.now().timestamp()) + product_id = "-".join( + "".join(rng.choices(PRODUCT_ID_VALID_CHARS, k=PRODUCT_ID_PART_LENGTH)) + for _ in range(PRODUCT_ID_NUMBER_OF_PARTS) + ) + if f"P.{product_id}" in cls.load_existing_ids(): + return cls.create() + return cls(__root__=f"P.{product_id}") + + @classmethod + def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: + """Validate that the ProductId has the correct format.""" + return PRODUCT_ID_PATTERN.match(cpm_system_id) is not None + + @classmethod + def load_existing_ids(cls): + return _load_existing_ids() + + +class ProductTeamId(CpmSystemId): + @classmethod + def create(cls, ods_code: str = None): + if ods_code: + return cls(__root__=f"{ods_code}.{uuid4()}") + return cls(__root__=str(uuid4())) + + @classmethod + def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: + """Validate that the product_team key has the correct format.""" + if "." in cpm_system_id: + return PRODUCT_TEAM_EPR_ID_PATTERN.match(cpm_system_id) is not None + return PRODUCT_TEAM_ID_PATTERN.match(cpm_system_id) is not None diff --git a/src/layers/domain/core/device/__init__.py b/archived_epr/src_old/layers/domain/core/device/__init__.py similarity index 100% rename from src/layers/domain/core/device/__init__.py rename to archived_epr/src_old/layers/domain/core/device/__init__.py diff --git a/src/layers/domain/core/device/tests/test_device_v1.py b/archived_epr/src_old/layers/domain/core/device/tests/test_device_v1.py similarity index 100% rename from src/layers/domain/core/device/tests/test_device_v1.py rename to archived_epr/src_old/layers/domain/core/device/tests/test_device_v1.py diff --git a/src/layers/domain/core/device/v1.py b/archived_epr/src_old/layers/domain/core/device/v1.py similarity index 100% rename from src/layers/domain/core/device/v1.py rename to archived_epr/src_old/layers/domain/core/device/v1.py diff --git a/src/layers/domain/core/device_id.py b/archived_epr/src_old/layers/domain/core/device_id.py similarity index 100% rename from src/layers/domain/core/device_id.py rename to archived_epr/src_old/layers/domain/core/device_id.py diff --git a/src/etl/sds/worker/bulk/extract_bulk/__init__.py b/archived_epr/src_old/layers/domain/core/device_key/__init__.py similarity index 100% rename from src/etl/sds/worker/bulk/extract_bulk/__init__.py rename to archived_epr/src_old/layers/domain/core/device_key/__init__.py diff --git a/src/layers/domain/core/device_key/tests/test_device_key_v1.py b/archived_epr/src_old/layers/domain/core/device_key/tests/test_device_key_v1.py similarity index 100% rename from src/layers/domain/core/device_key/tests/test_device_key_v1.py rename to archived_epr/src_old/layers/domain/core/device_key/tests/test_device_key_v1.py diff --git a/src/layers/domain/core/device_key/v1.py b/archived_epr/src_old/layers/domain/core/device_key/v1.py similarity index 100% rename from src/layers/domain/core/device_key/v1.py rename to archived_epr/src_old/layers/domain/core/device_key/v1.py diff --git a/src/layers/domain/core/device_reference_data/__init__.py b/archived_epr/src_old/layers/domain/core/device_reference_data/__init__.py 
similarity index 100% rename from src/layers/domain/core/device_reference_data/__init__.py rename to archived_epr/src_old/layers/domain/core/device_reference_data/__init__.py diff --git a/src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py b/archived_epr/src_old/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py similarity index 100% rename from src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py rename to archived_epr/src_old/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py diff --git a/src/layers/domain/core/device_reference_data/v1.py b/archived_epr/src_old/layers/domain/core/device_reference_data/v1.py similarity index 100% rename from src/layers/domain/core/device_reference_data/v1.py rename to archived_epr/src_old/layers/domain/core/device_reference_data/v1.py diff --git a/archived_epr/src_old/layers/domain/core/enum.py b/archived_epr/src_old/layers/domain/core/enum.py new file mode 100644 index 000000000..8d47c7ea8 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/enum.py @@ -0,0 +1,20 @@ +from enum import StrEnum, auto + + +class Status(StrEnum): + ACTIVE = auto() + INACTIVE = auto() # "soft" delete + + +class Environment(StrEnum): + DEV = auto() + QA = auto() + REF = auto() + INT = auto() + PROD = auto() + + +class EntityType(StrEnum): + PRODUCT_TEAM = auto() + PRODUCT_TEAM_ALIAS = auto() + PRODUCT = auto() diff --git a/src/layers/domain/repository/device_repository/__init__.py b/archived_epr/src_old/layers/domain/core/epr_product/__init__.py similarity index 100% rename from src/layers/domain/repository/device_repository/__init__.py rename to archived_epr/src_old/layers/domain/core/epr_product/__init__.py diff --git a/src/layers/domain/core/epr_product/tests/test_cpm_product_v1.py b/archived_epr/src_old/layers/domain/core/epr_product/tests/test_cpm_product_v1.py similarity index 100% rename from src/layers/domain/core/epr_product/tests/test_cpm_product_v1.py rename to archived_epr/src_old/layers/domain/core/epr_product/tests/test_cpm_product_v1.py diff --git a/src/layers/domain/core/epr_product/v1.py b/archived_epr/src_old/layers/domain/core/epr_product/v1.py similarity index 100% rename from src/layers/domain/core/epr_product/v1.py rename to archived_epr/src_old/layers/domain/core/epr_product/v1.py diff --git a/archived_epr/src_old/layers/domain/core/error.py b/archived_epr/src_old/layers/domain/core/error.py new file mode 100644 index 000000000..b21771e98 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/error.py @@ -0,0 +1,78 @@ +class DuplicateError(Exception): + pass + + +class NotFoundError(Exception): + pass + + +class ConflictError(Exception): + pass + + +class InvalidKeyPattern(ValueError): + pass + + +class InvalidProductTeamKeyError(ValueError): + pass + + +class InvalidKeyError(Exception): + pass + + +class InvalidProductIdError(Exception): + pass + + +class InvalidAccreditedSystemIdError(Exception): + pass + + +class InvalidResponseError(ValueError): + pass + + +class UnknownFields(Exception): + pass + + +class ImmutableFieldError(Exception): + pass + + +class EventUpdatedError(Exception): + pass + + +class EventExpected(Exception): + pass + + +class ConfigurationError(Exception): + pass + + +class NotCpmProductError(Exception): + pass + + +class NotEprProductError(Exception): + pass + + +class InvalidSpineMhsResponse(Exception): + pass + + +class InvalidSpineAsResponse(Exception): + pass + + +class 
AccreditedSystemFatalError(Exception): + pass + + +class DuplicateInteractionIdError(Exception): + pass diff --git a/archived_epr/src_old/layers/domain/core/event.py b/archived_epr/src_old/layers/domain/core/event.py new file mode 100644 index 000000000..c9a79e3b0 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/event.py @@ -0,0 +1,56 @@ +import re +from abc import ABC, ABCMeta + +from attr import has as is_dataclass + +ExportedEventTypeDef = dict[str, dict] +ExportedEventsTypeDef = list[ExportedEventTypeDef] + + +def _camel_case_to_snake_case(x: str) -> str: + return re.sub(r"(? Event: + ((event_name, event_data),) = exported_event.items() + for event_type in cls.event_types: + if event_name == event_type.public_name: + return event_type(**event_data) + raise NotImplementedError( + f"{cls.__name__}: not implemented parsing of {event_name}" + ) diff --git a/archived_epr/src_old/layers/domain/core/event_deserializer.py b/archived_epr/src_old/layers/domain/core/event_deserializer.py new file mode 100644 index 000000000..9467b28d2 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/event_deserializer.py @@ -0,0 +1,22 @@ +from domain.core.device.v1 import DeviceEventDeserializer +from domain.core.device_reference_data.v1 import DeviceReferenceDataEventDeserializer +from domain.core.epr_product.v1 import EprProductEventDeserializer +from domain.core.event import Event, ExportedEventTypeDef +from domain.core.product_team.v1 import ProductTeamEventDeserializer +from sds.epr.updates.etl_device import EtlDeviceEventDeserializer + + +def deserialize_event(event: ExportedEventTypeDef) -> Event: + exceptions = [] + for deserializer in ( + ProductTeamEventDeserializer, + EprProductEventDeserializer, + DeviceEventDeserializer, + DeviceReferenceDataEventDeserializer, + EtlDeviceEventDeserializer, + ): + try: + return (deserializer, deserializer.parse(event)) + except Exception as exception: + exceptions.append(exception) + raise ExceptionGroup(f"Could not deserialise {event}", exceptions) diff --git a/src/layers/domain/repository/product_team_epr_repository/__init__.py b/archived_epr/src_old/layers/domain/core/ods_organisation/__init__.py similarity index 100% rename from src/layers/domain/repository/product_team_epr_repository/__init__.py rename to archived_epr/src_old/layers/domain/core/ods_organisation/__init__.py diff --git a/archived_epr/src_old/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py b/archived_epr/src_old/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py new file mode 100644 index 000000000..638b1fae9 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py @@ -0,0 +1,26 @@ +from contextlib import nullcontext as do_not_raise + +import pytest +from domain.core.root import Root +from pydantic import ValidationError + + +@pytest.mark.parametrize( + "ods_code", + ["F5H1R", "RTG09", "NLF02", "D82007002"], +) +def test__can_instantiate_ods_organisation(ods_code: str): + with do_not_raise(): + Root.create_ods_organisation(ods_code=ods_code) + + +@pytest.mark.parametrize( + "ods_code", + [ + "ABCDEFGHIJ", + "!@£$%", + ], +) +def test__id_must_be_valid_ods_code(ods_code: str): + with pytest.raises(ValidationError): + Root.create_ods_organisation(ods_code=ods_code) diff --git a/archived_epr/src_old/layers/domain/core/ods_organisation/v1.py b/archived_epr/src_old/layers/domain/core/ods_organisation/v1.py new file mode 100644 index 000000000..fbc9bc022 --- /dev/null +++ 
b/archived_epr/src_old/layers/domain/core/ods_organisation/v1.py @@ -0,0 +1,34 @@ +from domain.core.aggregate_root import AggregateRoot +from domain.core.product_team import ProductTeam, ProductTeamCreatedEvent +from domain.core.product_team_epr import ProductTeam as ProductTeamEpr +from domain.core.product_team_epr import ( + ProductTeamCreatedEvent as ProductTeamCreatedEventEpr, +) +from domain.core.validation import ODS_CODE_REGEX +from pydantic import Field + + +class OdsOrganisation(AggregateRoot): + """ + An object that maps onto the Organisational Data Service (ODS) definition + of an "Organisation". We are only interested in a sub-set of the fields + they hold. + """ + + ods_code: str = Field(regex=ODS_CODE_REGEX) + + def create_product_team(self, name: str, keys: list = None) -> ProductTeam: + keys = keys or [] + product_team = ProductTeam(name=name, ods_code=self.ods_code, keys=keys) + event = ProductTeamCreatedEvent(**product_team.state()) + product_team.add_event(event) + self.add_event(event=event) + return product_team + + def create_product_team_epr(self, name: str, keys: list = None) -> ProductTeamEpr: + keys = keys or [] + product_team = ProductTeamEpr(name=name, ods_code=self.ods_code, keys=keys) + event = ProductTeamCreatedEventEpr(**product_team.state()) + product_team.add_event(event) + self.add_event(event=event) + return product_team diff --git a/src/layers/domain/core/questionnaire/__init__.py b/archived_epr/src_old/layers/domain/core/product_key/__init__.py similarity index 100% rename from src/layers/domain/core/questionnaire/__init__.py rename to archived_epr/src_old/layers/domain/core/product_key/__init__.py diff --git a/archived_epr/src_old/layers/domain/core/product_key/v1.py b/archived_epr/src_old/layers/domain/core/product_key/v1.py new file mode 100644 index 000000000..4abaa1aa5 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_key/v1.py @@ -0,0 +1,21 @@ +import re +from enum import StrEnum, auto + +from domain.core.device_key import DeviceKey +from domain.core.validation import SdsId + + +class ProductKeyType(StrEnum): + PARTY_KEY = auto() + + @property + def pattern(self) -> re.Pattern: + # To the developer: if adding more patterns then please refer to DeviceKeyType.pattern for guidance + return SdsId.PartyKey.ID_PATTERN + + +class ProductKey(DeviceKey): + """A Product Key is a secondary way of indexing / retrieving Products""" + + key_type: ProductKeyType + key_value: str diff --git a/src/layers/domain/repository/repository/__init__.py b/archived_epr/src_old/layers/domain/core/product_team/__init__.py similarity index 100% rename from src/layers/domain/repository/repository/__init__.py rename to archived_epr/src_old/layers/domain/core/product_team/__init__.py diff --git a/archived_epr/src_old/layers/domain/core/product_team/tests/test_product_team_v1.py b/archived_epr/src_old/layers/domain/core/product_team/tests/test_product_team_v1.py new file mode 100644 index 000000000..27d174e6a --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_team/tests/test_product_team_v1.py @@ -0,0 +1,298 @@ +import re + +import pytest +from domain.core.product_team import ProductTeam, ProductTeamCreatedEvent +from domain.core.root import Root +from domain.request_models import CreateProductTeamIncomingParams +from pydantic import ValidationError + + +@pytest.mark.parametrize( + "keys,name", + [ + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + " ", + ], + ], +) +def 
test__create_product_team_bad_name(keys: list, name: str): + org = Root.create_ods_organisation(ods_code="AB123") + + with pytest.raises(ValidationError): + org.create_product_team(name=name, keys=keys) + + +@pytest.mark.parametrize( + "keys,name", + [ + [ + [ + { + "key_type": "product_team_id", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + "FOOBAR", + ], + ], +) +def test__create_product_team_bad_key_type(keys: list, name: str): + org = Root.create_ods_organisation(ods_code="AB123") + + with pytest.raises(ValidationError): + org.create_product_team(name=name, keys=keys) + + +@pytest.mark.parametrize( + "keys,name,ods_code", + [ + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + "First", + "AB123", + ], + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "edf90c3a-f865-4dd9-9ab9-400e6ebc02e0", + } + ], + "Second", + "AB123", + ], + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "f9518c12-6c83-4544-97db-d9dd1d64da97", + } + ], + "Third", + "AB123", + ], + [ + [{"key_type": "product_team_id_alias", "key_value": "foobar"}], + "Fourth", + "AB123", + ], + ], +) +def test__create_product_team( + keys: list, + name: str, + ods_code: str, +): + product_team = ProductTeam(name=name, ods_code=ods_code, keys=keys) + generated_id = product_team.id + assert isinstance(product_team, ProductTeam) + assert re.match(r"[0-9a-fA-F-]{36}", generated_id) + assert product_team.keys == keys + assert product_team.name == name + assert product_team.ods_code == ods_code + + +@pytest.mark.parametrize( + "id,keys,name,ods_code", + [ + [ + None, + [ + { + "key_type": "product_team_id_alias", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + "First", + "AB123", + ], + [ + None, + [ + { + "key_type": "product_team_id_alias", + "key_value": "edf90c3a-f865-4dd9-9ab9-400e6ebc02e0", + } + ], + "Second", + "AB123", + ], + [ + None, + [ + { + "key_type": "product_team_id_alias", + "key_value": "f9518c12-6c83-4544-97db-d9dd1d64da97", + } + ], + "Third", + "AB123", + ], + [ + None, + [{"key_type": "product_team_id_alias", "key_value": "foobar"}], + "Fourth", + "AB123", + ], + ], +) +def test__create_product_team_provided_id_equals_none_is_ignored( + id: str, + keys: list, + name: str, + ods_code: str, +): + product_team = ProductTeam(id=id, name=name, ods_code=ods_code, keys=keys) + generated_id = product_team.id + assert isinstance(product_team, ProductTeam) + assert re.match(r"[0-9a-fA-F-]{36}", generated_id) + assert product_team.keys == keys + assert product_team.name == name + assert product_team.ods_code == ods_code + + +@pytest.mark.parametrize( + "keys,name", + [ + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + "First", + ], + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "edf90c3a-f865-4dd9-9ab9-400e6ebc02e0", + } + ], + "Second", + ], + [ + [ + { + "key_type": "product_team_id_alias", + "key_value": "f9518c12-6c83-4544-97db-d9dd1d64da97", + } + ], + "Third", + ], + [[{"key_type": "product_team_id_alias", "key_value": "foobar"}], "Fourth"], + ], +) +def test__create_product_team_from_org_no_id( + keys: str, + name: str, +): + org = Root.create_ods_organisation(ods_code="AB123") + + result = org.create_product_team(keys=keys, name=name) + event = result.events[0] + + assert isinstance(result, ProductTeam) + generated_id = result.id + assert re.match(r"[0-9a-fA-F-]{36}", generated_id) + assert result.keys 
== keys + assert result.name == name + assert result.ods_code == org.ods_code + + generated_id = event.id + assert len(result.events) == 1 + assert isinstance(event, ProductTeamCreatedEvent) + assert re.match(r"[0-9a-fA-F-]{36}", generated_id) + assert event.keys == keys + assert event.name == name + assert event.ods_code == org.ods_code + + +@pytest.mark.parametrize( + "id,keys,name", + [ + [ + None, + [ + { + "key_type": "product_team_id_alias", + "key_value": "ae28e872-843d-4e2e-9f0b-b5d3c42d441f", + } + ], + "First", + ], + [ + "FOOBAR", + [ + { + "key_type": "product_team_id_alias", + "key_value": "edf90c3a-f865-4dd9-9ab9-400e6ebc02e0", + } + ], + "Second", + ], + ], +) +def test__create_product_team_from_org_id_raises_error( + id: str, + keys: str, + name: str, +): + org = Root.create_ods_organisation(ods_code="AB123") + + with pytest.raises(TypeError): + org.create_product_team(id=id, name=name, keys=keys) + + +@pytest.mark.parametrize( + "params", + [ + { + "keys": [ + { + "key_type": "product_team_id_alias", + "key_value": "edf90c3a-f865-4dd9-9ab9-400e6ebc02e0", + } + ], + "ods_code": "AB123", + "name": "FooBar", + }, + ], +) +def test__create_product_team_with_incoming_params( + params: dict, +): + incoming_params = CreateProductTeamIncomingParams(**params) + org = Root.create_ods_organisation(incoming_params.ods_code) + + result = org.create_product_team(**incoming_params.dict(exclude={"ods_code"})) + event = result.events[0] + + assert isinstance(result, ProductTeam) + assert len(result.events) == 1 + assert isinstance(event, ProductTeamCreatedEvent) + + result_generated_id = result.id + event_generated_id = event.id + assert re.match(r"[0-9a-fA-F-]{36}", result_generated_id) + assert re.match(r"[0-9a-fA-F-]{36}", event_generated_id) + + assert result.keys == params["keys"] + assert result.name == params["name"] + assert result.ods_code == org.ods_code + + assert event.keys == params["keys"] + assert event.name == params["name"] + assert event.ods_code == org.ods_code diff --git a/archived_epr/src_old/layers/domain/core/product_team/v1.py b/archived_epr/src_old/layers/domain/core/product_team/v1.py new file mode 100644 index 000000000..d5387c630 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_team/v1.py @@ -0,0 +1,86 @@ +from datetime import datetime + +from attr import dataclass +from domain.core.aggregate_root import AggregateRoot +from domain.core.cpm_product import CpmProduct, CpmProductCreatedEvent +from domain.core.cpm_system_id import ProductTeamId +from domain.core.device import event +from domain.core.enum import Status +from domain.core.event import Event, EventDeserializer +from domain.core.product_team_key import ProductTeamKey +from domain.core.timestamp import now +from domain.core.validation import ENTITY_NAME_REGEX +from pydantic import Field, root_validator + + +@dataclass(kw_only=True, slots=True) +class ProductTeamCreatedEvent(Event): + id: str + name: str + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[ProductTeamKey] = Field(default_factory=list) + + +@dataclass(kw_only=True, slots=True) +class ProductTeamDeletedEvent(Event): + id: str + name: str + ods_code: str + status: Status + created_on: str + updated_on: str + deleted_on: str + keys: list[ProductTeamKey] = Field(default_factory=list) + + +class ProductTeam(AggregateRoot): + """ + A ProductTeam is the entity that owns Products, and is derived from ODS + Organisations. 
A single ODS Organisation can be mapped onto multiple + ProductTeams, meaning that `ods_code` is not unique amongst ProductTeams. + """ + + id: str = None + name: str = Field(regex=ENTITY_NAME_REGEX) + ods_code: str + status: Status = Status.ACTIVE + created_on: datetime = Field(default_factory=now, immutable=True) + updated_on: datetime = Field(default=None) + deleted_on: datetime = Field(default=None) + keys: list[ProductTeamKey] = Field(default_factory=list) + + @root_validator(pre=True) + def set_id(cls, values): + if not values.get("id"): + product_team = ProductTeamId.create() + values["id"] = product_team.id + return values + + def create_cpm_product(self, name: str, product_id: str = None) -> CpmProduct: + extra_kwargs = {"id": product_id} if product_id is not None else {} + product = CpmProduct( + product_team_id=self.id, name=name, ods_code=self.ods_code, **extra_kwargs + ) + data = product.state() + del data["keys"] + product_created_event = CpmProductCreatedEvent(**data) + product.add_event(product_created_event) + return product + + @event + def delete(self): + deleted_on = now() + product_team_data = self._update( + data=dict( + status=Status.INACTIVE, updated_on=deleted_on, deleted_on=deleted_on + ) + ) + return ProductTeamDeletedEvent(**product_team_data) + + +class ProductTeamEventDeserializer(EventDeserializer): + event_types = (ProductTeamCreatedEvent, ProductTeamDeletedEvent) diff --git a/archived_epr/src_old/layers/domain/core/product_team_epr/__init__.py b/archived_epr/src_old/layers/domain/core/product_team_epr/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_team_epr/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git a/src/layers/domain/core/product_team_epr/tests/test_product_team_v1.py b/archived_epr/src_old/layers/domain/core/product_team_epr/tests/test_product_team_v1.py similarity index 100% rename from src/layers/domain/core/product_team_epr/tests/test_product_team_v1.py rename to archived_epr/src_old/layers/domain/core/product_team_epr/tests/test_product_team_v1.py diff --git a/src/layers/domain/core/product_team_epr/v1.py b/archived_epr/src_old/layers/domain/core/product_team_epr/v1.py similarity index 100% rename from src/layers/domain/core/product_team_epr/v1.py rename to archived_epr/src_old/layers/domain/core/product_team_epr/v1.py diff --git a/archived_epr/src_old/layers/domain/core/product_team_key/__init__.py b/archived_epr/src_old/layers/domain/core/product_team_key/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_team_key/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git a/archived_epr/src_old/layers/domain/core/product_team_key/v1.py b/archived_epr/src_old/layers/domain/core/product_team_key/v1.py new file mode 100644 index 000000000..ae355417b --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/product_team_key/v1.py @@ -0,0 +1,29 @@ +import re +from enum import StrEnum, auto + +from domain.core.device_key import DeviceKey +from domain.core.validation import CpmId + + +class ProductTeamKeyType(StrEnum): + PRODUCT_TEAM_ID_ALIAS = auto() + EPR_ID = auto() + + @property + def pattern(self) -> re.Pattern: + match self: + case ProductTeamKeyType.PRODUCT_TEAM_ID_ALIAS: + return CpmId.ProductTeamIdAlias.ID_PATTERN + case ProductTeamKeyType.EPR_ID: + return CpmId.EprId.ID_PATTERN + case _: + raise NotImplementedError(f"No ID validation 
configured for '{self}'") + + +class ProductTeamKey(DeviceKey): + """ + A ProductTeam Key is a secondary way of indexing / retrieving Product Teams + """ + + key_type: ProductTeamKeyType + key_value: str diff --git a/src/layers/domain/repository/device_reference_data_repository/__init__.py b/archived_epr/src_old/layers/domain/core/questionnaire/__init__.py similarity index 100% rename from src/layers/domain/repository/device_reference_data_repository/__init__.py rename to archived_epr/src_old/layers/domain/core/questionnaire/__init__.py diff --git a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py b/archived_epr/src_old/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py similarity index 100% rename from src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py rename to archived_epr/src_old/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py diff --git a/src/layers/domain/core/questionnaire/v1.py b/archived_epr/src_old/layers/domain/core/questionnaire/v1.py similarity index 100% rename from src/layers/domain/core/questionnaire/v1.py rename to archived_epr/src_old/layers/domain/core/questionnaire/v1.py diff --git a/archived_epr/src_old/layers/domain/core/root/__init__.py b/archived_epr/src_old/layers/domain/core/root/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/root/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git a/archived_epr/src_old/layers/domain/core/root/tests/test_root_v1.py b/archived_epr/src_old/layers/domain/core/root/tests/test_root_v1.py new file mode 100644 index 000000000..d4d163230 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/root/tests/test_root_v1.py @@ -0,0 +1,7 @@ +from domain.core.ods_organisation import OdsOrganisation +from domain.core.root import Root + + +def test_create_ods_organisation(): + org = Root.create_ods_organisation(ods_code="ABC") + assert isinstance(org, OdsOrganisation) diff --git a/archived_epr/src_old/layers/domain/core/root/v1.py b/archived_epr/src_old/layers/domain/core/root/v1.py new file mode 100644 index 000000000..7c0a77646 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/root/v1.py @@ -0,0 +1,13 @@ +from domain.core.ods_organisation import OdsOrganisation + + +class Root: + """ + Domain entities that have no parent are created by this Root entity, in + order to preserve the rule that all Aggregate Roots are created by other + Aggregate Roots. 
+ """ + + @staticmethod + def create_ods_organisation(ods_code: str) -> OdsOrganisation: + return OdsOrganisation(ods_code=ods_code) diff --git a/archived_epr/src_old/layers/domain/core/tests/test_aggregate_root.py b/archived_epr/src_old/layers/domain/core/tests/test_aggregate_root.py new file mode 100644 index 000000000..535569d5d --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/tests/test_aggregate_root.py @@ -0,0 +1,88 @@ +import pytest +from attr import dataclass +from domain.core.aggregate_root import AggregateRoot +from domain.core.error import ImmutableFieldError, UnknownFields +from domain.core.event import Event +from pydantic import Field, ValidationError + + +def test_export_events(): + @dataclass + class MyEvent(Event): + my_value: str + + @dataclass + class MyOtherEvent(Event): + my_value: str + my_other_value: str + + root = AggregateRoot() + root.add_event(MyEvent(my_value="foo")) + root.add_event(MyOtherEvent(my_value="foo", my_other_value="bar")) + root.add_event(MyEvent(my_value="bar")) + + assert root.export_events() == [ + {"my_event": {"my_value": "foo"}}, + {"my_other_event": {"my_value": "foo", "my_other_value": "bar"}}, + {"my_event": {"my_value": "bar"}}, + ] + + +def test_model_fields(): + class MyModel(AggregateRoot): + field_1: str + field_2: str = Field(exclude=True) + field_3: int + + my_model = MyModel(field_1="foo", field_2="bar", field_3=1) + assert my_model.model_fields == {"field_1", "field_3"} + + +def test_immutable_fields(): + class MyModel(AggregateRoot): + field_1: str + field_2: str = Field(immutable=True, exclude=True) + field_3: int = Field(immutable=True) + field_4: int + + my_model = MyModel(field_1="foo", field_2="bar", field_3=1, field_4=2) + assert my_model.immutable_fields == {"field_2", "field_3"} + + +def test__update(): + class MyModel(AggregateRoot): + field_1: str + field_2: str + field_3: int + + my_model = MyModel(field_1="foo", field_2="bar", field_3=1) + data = my_model._update({"field_2": "BAR", "field_3": 23}) + + assert data == {"field_1": "foo", "field_2": "BAR", "field_3": 23} + + +def test__update_unknown_field(): + class MyModel(AggregateRoot): + pass + + my_model = MyModel() + with pytest.raises(UnknownFields): + my_model._update({"field_2": "BAR"}) + + +def test__update_immutable_field_error(): + class MyModel(AggregateRoot): + field_1: str = Field(immutable=True) + + my_model = MyModel(field_1="bar") + with pytest.raises(ImmutableFieldError): + my_model._update({"field_1": "BAR"}) + + +def test__update_bad_data(): + class MyModel(AggregateRoot): + field_1: int + + my_model = MyModel(field_1=123) + with pytest.raises(ValidationError): + my_model._update({"field_1": "BAR"}) diff --git a/archived_epr/src_old/layers/domain/core/tests/test_event.py b/archived_epr/src_old/layers/domain/core/tests/test_event.py new file mode 100644 index 000000000..0c930844e --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/tests/test_event.py @@ -0,0 +1,9 @@ +from domain.core.event import Event + + +def test_public_name(): + class MyEvent(Event): + pass + + assert MyEvent.public_name == "my_event" + assert MyEvent().public_name == "my_event" diff --git a/src/layers/domain/core/tests/test_product_id.py b/archived_epr/src_old/layers/domain/core/tests/test_product_id.py similarity index 100% rename from src/layers/domain/core/tests/test_product_id.py rename to archived_epr/src_old/layers/domain/core/tests/test_product_id.py diff --git a/archived_epr/src_old/layers/domain/core/tests/test_timestamp.py 
b/archived_epr/src_old/layers/domain/core/tests/test_timestamp.py new file mode 100644 index 000000000..fb7d2c39f --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/tests/test_timestamp.py @@ -0,0 +1,12 @@ +from datetime import date, datetime + +from domain.core.timestamp import now +from pydantic import BaseModel + + +def test_now(): + class MyModel(BaseModel): + my_date: datetime + + my_model = MyModel(my_date=now()) + assert my_model.my_date.date() == date.today() diff --git a/archived_epr/src_old/layers/domain/core/timestamp.py b/archived_epr/src_old/layers/domain/core/timestamp.py new file mode 100644 index 000000000..9e0bb4491 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/timestamp.py @@ -0,0 +1,5 @@ +from datetime import datetime, timezone + + +def now(): + return datetime.now(timezone.utc) diff --git a/archived_epr/src_old/layers/domain/core/validation.py b/archived_epr/src_old/layers/domain/core/validation.py new file mode 100644 index 000000000..018abc757 --- /dev/null +++ b/archived_epr/src_old/layers/domain/core/validation.py @@ -0,0 +1,39 @@ +import re + +DEVICE_KEY_SEPARATOR = ":" + +_ODS_CODE_REGEX = r"[a-zA-Z0-9]{1,9}" +UUID_REGEX = ( + r"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$" +) +ODS_CODE_REGEX = rf"^({_ODS_CODE_REGEX})$" +ENTITY_NAME_REGEX = r"^\S+( \S+)*$" +DEVICE_NAME_REGEX = r"^[ -~]+$" # any sequence of ascii +CPM_PRODUCT_NAME_REGEX = r"^[ -~]+$" # any sequence of ascii + + +class CpmId: + class Product: + PRODUCT_ID_CHARS = "ACDEFGHJKLMNPRTUVWXY34679" + ID_PATTERN = re.compile( + rf"^P\.[{PRODUCT_ID_CHARS}]{{3}}-[{PRODUCT_ID_CHARS}]{{3}}$" + ) + + class ProductTeamIdAlias: + ID_PATTERN = re.compile(rf"^[ -~]+$") + + class EprId: + ID_PATTERN = re.compile(rf"^EPR-{_ODS_CODE_REGEX}$") + + +class SdsId: + class AccreditedSystem: + ID_PATTERN = re.compile(rf"^[a-zA-Z-0-9]+$") + + class PartyKey: + PARTY_KEY_REGEX = rf"^{_ODS_CODE_REGEX}-[0-9]{{5,9}}$" + ID_PATTERN = re.compile(PARTY_KEY_REGEX) + + class CpaId: + CPA_ID_REGEX = rf"^[a-zA-Z0-9\-\:\_]+$" + ID_PATTERN = re.compile(CPA_ID_REGEX) diff --git a/src/layers/domain/repository/epr_product_repository/__init__.py b/archived_epr/src_old/layers/domain/repository/device_reference_data_repository/__init__.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/__init__.py rename to archived_epr/src_old/layers/domain/repository/device_reference_data_repository/__init__.py diff --git a/src/layers/domain/repository/device_reference_data_repository/tests/conftest.py b/archived_epr/src_old/layers/domain/repository/device_reference_data_repository/tests/conftest.py similarity index 100% rename from src/layers/domain/repository/device_reference_data_repository/tests/conftest.py rename to archived_epr/src_old/layers/domain/repository/device_reference_data_repository/tests/conftest.py diff --git a/src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py b/archived_epr/src_old/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py similarity index 100% rename from src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py diff --git a/src/layers/domain/repository/device_reference_data_repository/v1.py 
b/archived_epr/src_old/layers/domain/repository/device_reference_data_repository/v1.py similarity index 100% rename from src/layers/domain/repository/device_reference_data_repository/v1.py rename to archived_epr/src_old/layers/domain/repository/device_reference_data_repository/v1.py diff --git a/archived_epr/src_old/layers/domain/repository/device_repository/__init__.py b/archived_epr/src_old/layers/domain/repository/device_repository/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/repository/device_repository/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git a/src/layers/domain/repository/device_repository/tests/utils.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/utils.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/utils.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/utils.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/conftest.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/conftest.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/v1/conftest.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/conftest.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py similarity index 100% rename from src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py b/archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py similarity index 100% rename from 
src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py rename to archived_epr/src_old/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py diff --git a/src/layers/domain/repository/device_repository/v1.py b/archived_epr/src_old/layers/domain/repository/device_repository/v1.py similarity index 100% rename from src/layers/domain/repository/device_repository/v1.py rename to archived_epr/src_old/layers/domain/repository/device_repository/v1.py diff --git a/archived_epr/src_old/layers/domain/repository/epr_product_repository/__init__.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/__init__.py new file mode 100644 index 000000000..e0d08e678 --- /dev/null +++ b/archived_epr/src_old/layers/domain/repository/epr_product_repository/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa diff --git a/src/layers/domain/repository/epr_product_repository/tests/v1/conftest.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/conftest.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/tests/v1/conftest.py rename to archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/conftest.py diff --git a/src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_keys_v1.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_keys_v1.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_keys_v1.py rename to archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_keys_v1.py diff --git a/src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1.py diff --git a/src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1_delete.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1_delete.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1_delete.py rename to archived_epr/src_old/layers/domain/repository/epr_product_repository/tests/v1/test_epr_product_repository_v1_delete.py diff --git a/src/layers/domain/repository/epr_product_repository/v1.py b/archived_epr/src_old/layers/domain/repository/epr_product_repository/v1.py similarity index 100% rename from src/layers/domain/repository/epr_product_repository/v1.py rename to archived_epr/src_old/layers/domain/repository/epr_product_repository/v1.py diff --git a/archived_epr/src_old/layers/domain/repository/product_team_epr_repository/__init__.py b/archived_epr/src_old/layers/domain/repository/product_team_epr_repository/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/repository/product_team_epr_repository/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git 
a/src/layers/domain/repository/product_team_epr_repository/tests/test_product_team_repository_v1.py b/archived_epr/src_old/layers/domain/repository/product_team_epr_repository/tests/test_product_team_repository_v1.py similarity index 100% rename from src/layers/domain/repository/product_team_epr_repository/tests/test_product_team_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/product_team_epr_repository/tests/test_product_team_repository_v1.py diff --git a/src/layers/domain/repository/product_team_epr_repository/v1.py b/archived_epr/src_old/layers/domain/repository/product_team_epr_repository/v1.py similarity index 100% rename from src/layers/domain/repository/product_team_epr_repository/v1.py rename to archived_epr/src_old/layers/domain/repository/product_team_epr_repository/v1.py diff --git a/src/layers/domain/repository/questionnaire_repository/__init__.py b/archived_epr/src_old/layers/domain/repository/questionnaire_repository/__init__.py similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/__init__.py rename to archived_epr/src_old/layers/domain/repository/questionnaire_repository/__init__.py diff --git a/src/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py b/archived_epr/src_old/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py b/archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py rename to archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py b/archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py rename to archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py diff --git a/src/layers/domain/repository/questionnaire_repository/v1/tests/test_spine_questionnaires.py b/archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/tests/test_spine_questionnaires.py similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v1/tests/test_spine_questionnaires.py rename to archived_epr/src_old/layers/domain/repository/questionnaire_repository/v1/tests/test_spine_questionnaires.py diff --git a/archived_epr/src_old/layers/domain/repository/repository/__init__.py b/archived_epr/src_old/layers/domain/repository/repository/__init__.py new file mode 100644 index 000000000..e1ddb07c7 --- /dev/null +++ b/archived_epr/src_old/layers/domain/repository/repository/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa: F403, F401 diff --git a/src/layers/domain/repository/repository/tests/model_v1.py b/archived_epr/src_old/layers/domain/repository/repository/tests/model_v1.py similarity index 100% rename from 
src/layers/domain/repository/repository/tests/model_v1.py rename to archived_epr/src_old/layers/domain/repository/repository/tests/model_v1.py diff --git a/src/layers/domain/repository/repository/tests/test_repository_v1.py b/archived_epr/src_old/layers/domain/repository/repository/tests/test_repository_v1.py similarity index 100% rename from src/layers/domain/repository/repository/tests/test_repository_v1.py rename to archived_epr/src_old/layers/domain/repository/repository/tests/test_repository_v1.py diff --git a/src/layers/domain/repository/repository/v1.py b/archived_epr/src_old/layers/domain/repository/repository/v1.py similarity index 100% rename from src/layers/domain/repository/repository/v1.py rename to archived_epr/src_old/layers/domain/repository/repository/v1.py diff --git a/src/layers/etl_utils/constants/__init__.py b/archived_epr/src_old/layers/etl_utils/constants/__init__.py similarity index 100% rename from src/layers/etl_utils/constants/__init__.py rename to archived_epr/src_old/layers/etl_utils/constants/__init__.py diff --git a/src/layers/etl_utils/io/__init__.py b/archived_epr/src_old/layers/etl_utils/io/__init__.py similarity index 100% rename from src/layers/etl_utils/io/__init__.py rename to archived_epr/src_old/layers/etl_utils/io/__init__.py diff --git a/src/layers/etl_utils/io/test/io_utils.py b/archived_epr/src_old/layers/etl_utils/io/test/io_utils.py similarity index 100% rename from src/layers/etl_utils/io/test/io_utils.py rename to archived_epr/src_old/layers/etl_utils/io/test/io_utils.py diff --git a/src/layers/etl_utils/io/test/test_io_dump.py b/archived_epr/src_old/layers/etl_utils/io/test/test_io_dump.py similarity index 99% rename from src/layers/etl_utils/io/test/test_io_dump.py rename to archived_epr/src_old/layers/etl_utils/io/test/test_io_dump.py index b6d5ec428..4e8cd607b 100644 --- a/src/layers/etl_utils/io/test/test_io_dump.py +++ b/archived_epr/src_old/layers/etl_utils/io/test/test_io_dump.py @@ -7,7 +7,6 @@ # from etl.sds.tests.constants import EtlTestDataPath - # @pytest.mark.s3(EtlTestDataPath.FULL_JSON) Uncomment this when archived # def test_pkl_lz4(test_data_paths): # (path,) = test_data_paths diff --git a/src/layers/etl_utils/ldap_typing/__init__.py b/archived_epr/src_old/layers/etl_utils/ldap_typing/__init__.py similarity index 100% rename from src/layers/etl_utils/ldap_typing/__init__.py rename to archived_epr/src_old/layers/etl_utils/ldap_typing/__init__.py diff --git a/src/layers/etl_utils/ldif/_ldif.py b/archived_epr/src_old/layers/etl_utils/ldif/_ldif.py similarity index 100% rename from src/layers/etl_utils/ldif/_ldif.py rename to archived_epr/src_old/layers/etl_utils/ldif/_ldif.py diff --git a/src/layers/etl_utils/ldif/ldif.py b/archived_epr/src_old/layers/etl_utils/ldif/ldif.py similarity index 100% rename from src/layers/etl_utils/ldif/ldif.py rename to archived_epr/src_old/layers/etl_utils/ldif/ldif.py diff --git a/src/layers/etl_utils/ldif/model.py b/archived_epr/src_old/layers/etl_utils/ldif/model.py similarity index 100% rename from src/layers/etl_utils/ldif/model.py rename to archived_epr/src_old/layers/etl_utils/ldif/model.py diff --git a/src/layers/etl_utils/ldif/tests/test_ldif.py b/archived_epr/src_old/layers/etl_utils/ldif/tests/test_ldif.py similarity index 100% rename from src/layers/etl_utils/ldif/tests/test_ldif.py rename to archived_epr/src_old/layers/etl_utils/ldif/tests/test_ldif.py diff --git a/src/layers/etl_utils/make/make.py b/archived_epr/src_old/layers/etl_utils/make/make.py similarity index 100% rename from 
src/layers/etl_utils/make/make.py rename to archived_epr/src_old/layers/etl_utils/make/make.py diff --git a/src/layers/etl_utils/smart_open/__init__.py b/archived_epr/src_old/layers/etl_utils/smart_open/__init__.py similarity index 100% rename from src/layers/etl_utils/smart_open/__init__.py rename to archived_epr/src_old/layers/etl_utils/smart_open/__init__.py diff --git a/src/layers/etl_utils/trigger/logger.py b/archived_epr/src_old/layers/etl_utils/trigger/logger.py similarity index 100% rename from src/layers/etl_utils/trigger/logger.py rename to archived_epr/src_old/layers/etl_utils/trigger/logger.py diff --git a/src/layers/etl_utils/trigger/model.py b/archived_epr/src_old/layers/etl_utils/trigger/model.py similarity index 100% rename from src/layers/etl_utils/trigger/model.py rename to archived_epr/src_old/layers/etl_utils/trigger/model.py diff --git a/src/layers/etl_utils/trigger/notify.py b/archived_epr/src_old/layers/etl_utils/trigger/notify.py similarity index 100% rename from src/layers/etl_utils/trigger/notify.py rename to archived_epr/src_old/layers/etl_utils/trigger/notify.py diff --git a/src/layers/etl_utils/trigger/operations.py b/archived_epr/src_old/layers/etl_utils/trigger/operations.py similarity index 100% rename from src/layers/etl_utils/trigger/operations.py rename to archived_epr/src_old/layers/etl_utils/trigger/operations.py diff --git a/src/layers/etl_utils/trigger/tests/test_notify.py b/archived_epr/src_old/layers/etl_utils/trigger/tests/test_notify.py similarity index 99% rename from src/layers/etl_utils/trigger/tests/test_notify.py rename to archived_epr/src_old/layers/etl_utils/trigger/tests/test_notify.py index e5ff492e5..5c35796f2 100644 --- a/src/layers/etl_utils/trigger/tests/test_notify.py +++ b/archived_epr/src_old/layers/etl_utils/trigger/tests/test_notify.py @@ -5,11 +5,10 @@ from unittest.mock import Mock import pytest +from etl.notify.tests.test_notify_lambda import EXAMPLE_DOT_COM from etl_utils.trigger.notify import notify from event.json import json_loads -from etl.notify.tests.test_notify_lambda import EXAMPLE_DOT_COM - FUNCTION_NAME = "my-function" diff --git a/src/layers/etl_utils/trigger/tests/test_trigger_operations.py b/archived_epr/src_old/layers/etl_utils/trigger/tests/test_trigger_operations.py similarity index 100% rename from src/layers/etl_utils/trigger/tests/test_trigger_operations.py rename to archived_epr/src_old/layers/etl_utils/trigger/tests/test_trigger_operations.py diff --git a/src/layers/etl_utils/worker/action.py b/archived_epr/src_old/layers/etl_utils/worker/action.py similarity index 100% rename from src/layers/etl_utils/worker/action.py rename to archived_epr/src_old/layers/etl_utils/worker/action.py diff --git a/src/layers/etl_utils/worker/exception.py b/archived_epr/src_old/layers/etl_utils/worker/exception.py similarity index 100% rename from src/layers/etl_utils/worker/exception.py rename to archived_epr/src_old/layers/etl_utils/worker/exception.py diff --git a/src/layers/etl_utils/worker/model.py b/archived_epr/src_old/layers/etl_utils/worker/model.py similarity index 100% rename from src/layers/etl_utils/worker/model.py rename to archived_epr/src_old/layers/etl_utils/worker/model.py diff --git a/src/layers/etl_utils/worker/steps.py b/archived_epr/src_old/layers/etl_utils/worker/steps.py similarity index 100% rename from src/layers/etl_utils/worker/steps.py rename to archived_epr/src_old/layers/etl_utils/worker/steps.py diff --git a/src/layers/etl_utils/worker/tests/test_exception.py 
b/archived_epr/src_old/layers/etl_utils/worker/tests/test_exception.py similarity index 100% rename from src/layers/etl_utils/worker/tests/test_exception.py rename to archived_epr/src_old/layers/etl_utils/worker/tests/test_exception.py diff --git a/src/layers/etl_utils/worker/tests/test_worker_steps.py b/archived_epr/src_old/layers/etl_utils/worker/tests/test_worker_steps.py similarity index 100% rename from src/layers/etl_utils/worker/tests/test_worker_steps.py rename to archived_epr/src_old/layers/etl_utils/worker/tests/test_worker_steps.py diff --git a/src/layers/etl_utils/worker/worker_step_chain.py b/archived_epr/src_old/layers/etl_utils/worker/worker_step_chain.py similarity index 100% rename from src/layers/etl_utils/worker/worker_step_chain.py rename to archived_epr/src_old/layers/etl_utils/worker/worker_step_chain.py diff --git a/src/layers/sds/domain/__init__.py b/archived_epr/src_old/layers/sds/domain/__init__.py similarity index 100% rename from src/layers/sds/domain/__init__.py rename to archived_epr/src_old/layers/sds/domain/__init__.py diff --git a/src/layers/sds/domain/base.py b/archived_epr/src_old/layers/sds/domain/base.py similarity index 100% rename from src/layers/sds/domain/base.py rename to archived_epr/src_old/layers/sds/domain/base.py diff --git a/src/layers/sds/domain/changelog.py b/archived_epr/src_old/layers/sds/domain/changelog.py similarity index 100% rename from src/layers/sds/domain/changelog.py rename to archived_epr/src_old/layers/sds/domain/changelog.py diff --git a/src/layers/sds/domain/constants.py b/archived_epr/src_old/layers/sds/domain/constants.py similarity index 100% rename from src/layers/sds/domain/constants.py rename to archived_epr/src_old/layers/sds/domain/constants.py diff --git a/src/layers/sds/domain/nhs_accredited_system.py b/archived_epr/src_old/layers/sds/domain/nhs_accredited_system.py similarity index 100% rename from src/layers/sds/domain/nhs_accredited_system.py rename to archived_epr/src_old/layers/sds/domain/nhs_accredited_system.py diff --git a/src/layers/sds/domain/nhs_mhs.py b/archived_epr/src_old/layers/sds/domain/nhs_mhs.py similarity index 100% rename from src/layers/sds/domain/nhs_mhs.py rename to archived_epr/src_old/layers/sds/domain/nhs_mhs.py diff --git a/src/layers/sds/domain/nhs_mhs_action.py b/archived_epr/src_old/layers/sds/domain/nhs_mhs_action.py similarity index 100% rename from src/layers/sds/domain/nhs_mhs_action.py rename to archived_epr/src_old/layers/sds/domain/nhs_mhs_action.py diff --git a/src/layers/sds/domain/nhs_mhs_cp.py b/archived_epr/src_old/layers/sds/domain/nhs_mhs_cp.py similarity index 100% rename from src/layers/sds/domain/nhs_mhs_cp.py rename to archived_epr/src_old/layers/sds/domain/nhs_mhs_cp.py diff --git a/src/layers/sds/domain/nhs_mhs_service.py b/archived_epr/src_old/layers/sds/domain/nhs_mhs_service.py similarity index 100% rename from src/layers/sds/domain/nhs_mhs_service.py rename to archived_epr/src_old/layers/sds/domain/nhs_mhs_service.py diff --git a/src/layers/sds/domain/organizational_unit.py b/archived_epr/src_old/layers/sds/domain/organizational_unit.py similarity index 100% rename from src/layers/sds/domain/organizational_unit.py rename to archived_epr/src_old/layers/sds/domain/organizational_unit.py diff --git a/src/layers/sds/domain/parse.py b/archived_epr/src_old/layers/sds/domain/parse.py similarity index 100% rename from src/layers/sds/domain/parse.py rename to archived_epr/src_old/layers/sds/domain/parse.py diff --git a/src/layers/sds/domain/sds_deletion_request.py 
b/archived_epr/src_old/layers/sds/domain/sds_deletion_request.py similarity index 100% rename from src/layers/sds/domain/sds_deletion_request.py rename to archived_epr/src_old/layers/sds/domain/sds_deletion_request.py diff --git a/src/layers/sds/domain/sds_modification_request.py b/archived_epr/src_old/layers/sds/domain/sds_modification_request.py similarity index 100% rename from src/layers/sds/domain/sds_modification_request.py rename to archived_epr/src_old/layers/sds/domain/sds_modification_request.py diff --git a/src/layers/sds/domain/tests/data/good_data.ldif b/archived_epr/src_old/layers/sds/domain/tests/data/good_data.ldif similarity index 100% rename from src/layers/sds/domain/tests/data/good_data.ldif rename to archived_epr/src_old/layers/sds/domain/tests/data/good_data.ldif diff --git a/src/layers/sds/domain/tests/test_base.py b/archived_epr/src_old/layers/sds/domain/tests/test_base.py similarity index 100% rename from src/layers/sds/domain/tests/test_base.py rename to archived_epr/src_old/layers/sds/domain/tests/test_base.py diff --git a/src/layers/sds/domain/tests/test_constants.py b/archived_epr/src_old/layers/sds/domain/tests/test_constants.py similarity index 100% rename from src/layers/sds/domain/tests/test_constants.py rename to archived_epr/src_old/layers/sds/domain/tests/test_constants.py diff --git a/src/layers/sds/domain/tests/test_sds_bulk_model.py b/archived_epr/src_old/layers/sds/domain/tests/test_sds_bulk_model.py similarity index 100% rename from src/layers/sds/domain/tests/test_sds_bulk_model.py rename to archived_epr/src_old/layers/sds/domain/tests/test_sds_bulk_model.py index 055412943..312ecb4f1 100644 --- a/src/layers/sds/domain/tests/test_sds_bulk_model.py +++ b/archived_epr/src_old/layers/sds/domain/tests/test_sds_bulk_model.py @@ -4,12 +4,12 @@ import boto3 import pytest +from etl.sds.tests.constants import EtlTestDataPath from etl_utils.ldif.ldif import filter_ldif_from_s3_by_property, parse_ldif from sds.domain.nhs_accredited_system import NhsAccreditedSystem from sds.domain.nhs_mhs import NhsMhs from sds.domain.parse import parse_sds_record -from etl.sds.tests.constants import EtlTestDataPath from test_helpers.pytest_skips import memory_intensive from test_helpers.terraform import read_terraform_output diff --git a/src/layers/sds/domain/tests/test_sds_changelog_model.py b/archived_epr/src_old/layers/sds/domain/tests/test_sds_changelog_model.py similarity index 100% rename from src/layers/sds/domain/tests/test_sds_changelog_model.py rename to archived_epr/src_old/layers/sds/domain/tests/test_sds_changelog_model.py diff --git a/src/layers/sds/domain/tests/test_sds_modification_request.py b/archived_epr/src_old/layers/sds/domain/tests/test_sds_modification_request.py similarity index 100% rename from src/layers/sds/domain/tests/test_sds_modification_request.py rename to archived_epr/src_old/layers/sds/domain/tests/test_sds_modification_request.py diff --git a/src/layers/sds/epr/bulk_create/bulk_create.py b/archived_epr/src_old/layers/sds/epr/bulk_create/bulk_create.py similarity index 100% rename from src/layers/sds/epr/bulk_create/bulk_create.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/bulk_create.py diff --git a/src/layers/sds/epr/bulk_create/bulk_load_fanout.py b/archived_epr/src_old/layers/sds/epr/bulk_create/bulk_load_fanout.py similarity index 100% rename from src/layers/sds/epr/bulk_create/bulk_load_fanout.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/bulk_load_fanout.py diff --git 
a/src/layers/sds/epr/bulk_create/bulk_repository.py b/archived_epr/src_old/layers/sds/epr/bulk_create/bulk_repository.py similarity index 100% rename from src/layers/sds/epr/bulk_create/bulk_repository.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/bulk_repository.py diff --git a/src/layers/sds/epr/bulk_create/epr_product_team_repository.py b/archived_epr/src_old/layers/sds/epr/bulk_create/epr_product_team_repository.py similarity index 100% rename from src/layers/sds/epr/bulk_create/epr_product_team_repository.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/epr_product_team_repository.py diff --git a/src/layers/sds/epr/bulk_create/tests/conftest.py b/archived_epr/src_old/layers/sds/epr/bulk_create/tests/conftest.py similarity index 100% rename from src/layers/sds/epr/bulk_create/tests/conftest.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/tests/conftest.py diff --git a/src/layers/sds/epr/bulk_create/tests/test_bulk_create.py b/archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_bulk_create.py similarity index 100% rename from src/layers/sds/epr/bulk_create/tests/test_bulk_create.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_bulk_create.py diff --git a/src/layers/sds/epr/bulk_create/tests/test_bulk_repository.py b/archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_bulk_repository.py similarity index 100% rename from src/layers/sds/epr/bulk_create/tests/test_bulk_repository.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_bulk_repository.py diff --git a/src/layers/sds/epr/bulk_create/tests/test_epr_product_team_repository.py b/archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_epr_product_team_repository.py similarity index 100% rename from src/layers/sds/epr/bulk_create/tests/test_epr_product_team_repository.py rename to archived_epr/src_old/layers/sds/epr/bulk_create/tests/test_epr_product_team_repository.py diff --git a/src/layers/sds/epr/constants.py b/archived_epr/src_old/layers/sds/epr/constants.py similarity index 100% rename from src/layers/sds/epr/constants.py rename to archived_epr/src_old/layers/sds/epr/constants.py diff --git a/src/layers/sds/epr/creators.py b/archived_epr/src_old/layers/sds/epr/creators.py similarity index 100% rename from src/layers/sds/epr/creators.py rename to archived_epr/src_old/layers/sds/epr/creators.py diff --git a/src/layers/sds/epr/getters.py b/archived_epr/src_old/layers/sds/epr/getters.py similarity index 100% rename from src/layers/sds/epr/getters.py rename to archived_epr/src_old/layers/sds/epr/getters.py diff --git a/src/layers/sds/epr/interactions.py b/archived_epr/src_old/layers/sds/epr/interactions.py similarity index 100% rename from src/layers/sds/epr/interactions.py rename to archived_epr/src_old/layers/sds/epr/interactions.py diff --git a/src/layers/sds/epr/readers.py b/archived_epr/src_old/layers/sds/epr/readers.py similarity index 100% rename from src/layers/sds/epr/readers.py rename to archived_epr/src_old/layers/sds/epr/readers.py diff --git a/src/layers/sds/epr/tags.py b/archived_epr/src_old/layers/sds/epr/tags.py similarity index 100% rename from src/layers/sds/epr/tags.py rename to archived_epr/src_old/layers/sds/epr/tags.py diff --git a/src/layers/sds/epr/tests/conftest.py b/archived_epr/src_old/layers/sds/epr/tests/conftest.py similarity index 100% rename from src/layers/sds/epr/tests/conftest.py rename to archived_epr/src_old/layers/sds/epr/tests/conftest.py diff --git a/src/layers/sds/epr/tests/test_creators.py 
b/archived_epr/src_old/layers/sds/epr/tests/test_creators.py similarity index 100% rename from src/layers/sds/epr/tests/test_creators.py rename to archived_epr/src_old/layers/sds/epr/tests/test_creators.py diff --git a/src/layers/sds/epr/tests/test_getters.py b/archived_epr/src_old/layers/sds/epr/tests/test_getters.py similarity index 100% rename from src/layers/sds/epr/tests/test_getters.py rename to archived_epr/src_old/layers/sds/epr/tests/test_getters.py diff --git a/src/layers/sds/epr/tests/test_readers.py b/archived_epr/src_old/layers/sds/epr/tests/test_readers.py similarity index 100% rename from src/layers/sds/epr/tests/test_readers.py rename to archived_epr/src_old/layers/sds/epr/tests/test_readers.py diff --git a/src/layers/sds/epr/tests/test_tags.py b/archived_epr/src_old/layers/sds/epr/tests/test_tags.py similarity index 100% rename from src/layers/sds/epr/tests/test_tags.py rename to archived_epr/src_old/layers/sds/epr/tests/test_tags.py diff --git a/src/layers/sds/epr/tests/test_updaters.py b/archived_epr/src_old/layers/sds/epr/tests/test_updaters.py similarity index 100% rename from src/layers/sds/epr/tests/test_updaters.py rename to archived_epr/src_old/layers/sds/epr/tests/test_updaters.py diff --git a/src/layers/sds/epr/tests/test_utils.py b/archived_epr/src_old/layers/sds/epr/tests/test_utils.py similarity index 100% rename from src/layers/sds/epr/tests/test_utils.py rename to archived_epr/src_old/layers/sds/epr/tests/test_utils.py diff --git a/src/layers/sds/epr/updaters.py b/archived_epr/src_old/layers/sds/epr/updaters.py similarity index 100% rename from src/layers/sds/epr/updaters.py rename to archived_epr/src_old/layers/sds/epr/updaters.py diff --git a/src/layers/sds/epr/updates/change_request_processors.py b/archived_epr/src_old/layers/sds/epr/updates/change_request_processors.py similarity index 100% rename from src/layers/sds/epr/updates/change_request_processors.py rename to archived_epr/src_old/layers/sds/epr/updates/change_request_processors.py diff --git a/src/layers/sds/epr/updates/change_request_routing.py b/archived_epr/src_old/layers/sds/epr/updates/change_request_routing.py similarity index 100% rename from src/layers/sds/epr/updates/change_request_routing.py rename to archived_epr/src_old/layers/sds/epr/updates/change_request_routing.py diff --git a/src/layers/sds/epr/updates/etl_device.py b/archived_epr/src_old/layers/sds/epr/updates/etl_device.py similarity index 100% rename from src/layers/sds/epr/updates/etl_device.py rename to archived_epr/src_old/layers/sds/epr/updates/etl_device.py diff --git a/src/layers/sds/epr/updates/etl_device_repository.py b/archived_epr/src_old/layers/sds/epr/updates/etl_device_repository.py similarity index 100% rename from src/layers/sds/epr/updates/etl_device_repository.py rename to archived_epr/src_old/layers/sds/epr/updates/etl_device_repository.py diff --git a/src/layers/sds/epr/updates/etl_update_repository.py b/archived_epr/src_old/layers/sds/epr/updates/etl_update_repository.py similarity index 100% rename from src/layers/sds/epr/updates/etl_update_repository.py rename to archived_epr/src_old/layers/sds/epr/updates/etl_update_repository.py diff --git a/src/layers/sds/epr/updates/modification_request_routing.py b/archived_epr/src_old/layers/sds/epr/updates/modification_request_routing.py similarity index 100% rename from src/layers/sds/epr/updates/modification_request_routing.py rename to archived_epr/src_old/layers/sds/epr/updates/modification_request_routing.py diff --git 
a/src/layers/sds/epr/updates/tests/test_change_request_routing.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_change_request_routing.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_change_request_routing.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_change_request_routing.py diff --git a/src/layers/sds/epr/updates/tests/test_etl_device.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_etl_device.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_etl_device.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_etl_device.py diff --git a/src/layers/sds/epr/updates/tests/test_etl_device_repository.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_etl_device_repository.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_etl_device_repository.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_etl_device_repository.py diff --git a/src/layers/sds/epr/updates/tests/test_modification_request_routing.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_modification_request_routing.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_modification_request_routing.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_modification_request_routing.py diff --git a/src/layers/sds/epr/updates/tests/test_process_request_to_add_as.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_add_as.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_request_to_add_as.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_add_as.py diff --git a/src/layers/sds/epr/updates/tests/test_process_request_to_add_mhs.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_add_mhs.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_request_to_add_mhs.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_add_mhs.py diff --git a/src/layers/sds/epr/updates/tests/test_process_request_to_delete_as.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_delete_as.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_request_to_delete_as.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_delete_as.py diff --git a/src/layers/sds/epr/updates/tests/test_process_request_to_delete_mhs.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_delete_mhs.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_request_to_delete_mhs.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_request_to_delete_mhs.py diff --git a/src/layers/sds/epr/updates/tests/test_process_to_modify_as.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_to_modify_as.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_to_modify_as.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_to_modify_as.py diff --git a/src/layers/sds/epr/updates/tests/test_process_to_modify_mhs.py b/archived_epr/src_old/layers/sds/epr/updates/tests/test_process_to_modify_mhs.py similarity index 100% rename from src/layers/sds/epr/updates/tests/test_process_to_modify_mhs.py rename to archived_epr/src_old/layers/sds/epr/updates/tests/test_process_to_modify_mhs.py diff --git 
a/src/layers/sds/epr/utils.py b/archived_epr/src_old/layers/sds/epr/utils.py similarity index 100% rename from src/layers/sds/epr/utils.py rename to archived_epr/src_old/layers/sds/epr/utils.py diff --git a/src/layers/sds/make/make.py b/archived_epr/src_old/layers/sds/make/make.py similarity index 100% rename from src/layers/sds/make/make.py rename to archived_epr/src_old/layers/sds/make/make.py diff --git a/src/layers/sds/worker/load.py b/archived_epr/src_old/layers/sds/worker/load.py similarity index 100% rename from src/layers/sds/worker/load.py rename to archived_epr/src_old/layers/sds/worker/load.py diff --git a/src/layers/third_party/dockerfiles/sds_update/Dockerfile b/archived_epr/src_old/layers/third_party/dockerfiles/sds_update/Dockerfile similarity index 100% rename from src/layers/third_party/dockerfiles/sds_update/Dockerfile rename to archived_epr/src_old/layers/third_party/dockerfiles/sds_update/Dockerfile diff --git a/src/layers/third_party/dockerfiles/sds_update/resolve_dependencies.py b/archived_epr/src_old/layers/third_party/dockerfiles/sds_update/resolve_dependencies.py similarity index 100% rename from src/layers/third_party/dockerfiles/sds_update/resolve_dependencies.py rename to archived_epr/src_old/layers/third_party/dockerfiles/sds_update/resolve_dependencies.py diff --git a/src/layers/third_party/dockerfiles/sds_update/resolve_dependencies.sh b/archived_epr/src_old/layers/third_party/dockerfiles/sds_update/resolve_dependencies.sh similarity index 100% rename from src/layers/third_party/dockerfiles/sds_update/resolve_dependencies.sh rename to archived_epr/src_old/layers/third_party/dockerfiles/sds_update/resolve_dependencies.sh diff --git a/changelog/2025-02-26.md b/changelog/2025-02-26.md new file mode 100644 index 000000000..260c8efb4 --- /dev/null +++ b/changelog/2025-02-26.md @@ -0,0 +1,5 @@ +- [PI-793] Remove EPR ETL +- [PI-795] Remove EPR repository layers +- [PI-834] Remove EPR domain logic +- Dependabot: Update black +- Dependabot: Update attrs diff --git a/infrastructure/terraform/per_workspace/main.tf b/infrastructure/terraform/per_workspace/main.tf index ff38e04b7..b2ac446d3 100644 --- a/infrastructure/terraform/per_workspace/main.tf +++ b/infrastructure/terraform/per_workspace/main.tf @@ -200,26 +200,3 @@ module "api_entrypoint" { domain = module.domain.domain_cert depends_on = [module.domain] } - -data "aws_s3_bucket" "truststore_bucket" { - bucket = "${local.project}--${replace(var.environment, "_", "-")}--truststore" -} - - -module "sds_etl" { - source = "./modules/etl/sds" - workspace_prefix = "${local.project}--${replace(terraform.workspace, "_", "-")}" - assume_account = var.assume_account - python_version = var.python_version - event_layer_arn = element([for instance in module.layers : instance if instance.name == "event"], 0).layer_arn - third_party_core_layer_arn = element([for instance in module.third_party_layers : instance if instance.name == "third_party_sds"], 0).layer_arn - third_party_sds_update_layer_arn = element([for instance in module.third_party_layers : instance if instance.name == "third_party_sds_update"], 0).layer_arn - domain_layer_arn = element([for instance in module.layers : instance if instance.name == "domain"], 0).layer_arn - sds_layer_arn = element([for instance in module.layers : instance if instance.name == "sds"], 0).layer_arn - table_name = module.eprtable.dynamodb_table_name - table_arn = module.eprtable.dynamodb_table_arn - is_persistent = var.workspace_type == "PERSISTENT" - truststore_bucket = 
data.aws_s3_bucket.truststore_bucket - etl_snapshot_bucket = local.etl_snapshot_bucket - environment = var.environment -} diff --git a/infrastructure/terraform/per_workspace/outputs.tf b/infrastructure/terraform/per_workspace/outputs.tf index 0ac8a5352..4e06525b7 100644 --- a/infrastructure/terraform/per_workspace/outputs.tf +++ b/infrastructure/terraform/per_workspace/outputs.tf @@ -22,14 +22,6 @@ output "invoke_url" { value = module.api_entrypoint.invoke_url } -output "sds_etl" { - value = module.sds_etl -} - -output "manual_trigger_arn" { - value = module.sds_etl.manual_trigger_arn -} - output "test_data_bucket" { value = "${local.project}--${replace(var.account_name, "_", "-")}--test-data" } diff --git a/pyproject.toml b/pyproject.toml index ac09211d6..9066dc206 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "connecting-party-manager" -version = "2025.02.24" +version = "2025.02.26" description = "Repository for the Connecting Party Manager API and related services" authors = ["NHS England"] license = "LICENSE.md" @@ -10,8 +10,6 @@ packages = [ { include = "domain", from = "src/layers" }, { include = "event", from = "src/layers" }, { include = "api_utils", from = "src/layers" }, - { include = "etl_utils", from = "src/layers" }, - { include = "sds", from = "src/layers" }, { include = "builder", from = "scripts" }, ] @@ -25,13 +23,13 @@ email-validator = "^2.1.0.post1" smart-open = "^7.0.1" urllib3 = "<3" orjson = "^3.9.15" -attrs = "^24.2.0" +attrs = "^25.1.0" locust = "^2.29.1" jsonschema = "^4.23.0" [tool.poetry.group.dev.dependencies] pre-commit = "^4.0.0" -black = "^24.1.1" +black = "^25.1.0" flake8 = "^7.0.0" behave = "^1.2.6" pytest = "^8.2.0" @@ -65,15 +63,15 @@ optional = true [tool.poetry.group.local.dependencies] ipython = "^8.17.2" -[tool.poetry.group.sds_update] -optional = true - -[tool.poetry.group.sds_update.dependencies] -python-ldap = "^3.4.4" -lz4 = "^4.3.3" - -[tool.poetry.group.sds.dependencies] -lz4 = "^4.3.3" +# [tool.poetry.group.sds_update] +# optional = true +# +# [tool.poetry.group.sds_update.dependencies] +# python-ldap = "^3.4.4" +# lz4 = "^4.3.3" +# +# [tool.poetry.group.sds.dependencies] +# lz4 = "^4.3.3" [tool.poetry.group.ci.dependencies] pygithub = "^2.3.0" diff --git a/scripts/builder/build.mk b/scripts/builder/build.mk index a1a2e84d5..40670946c 100644 --- a/scripts/builder/build.mk +++ b/scripts/builder/build.mk @@ -6,18 +6,18 @@ POSTMAN_COLLECTION = $(CURDIR)/src/api/tests/feature_tests/postman-collection.js TOOL_VERSIONS_COPY = $(TIMESTAMP_DIR)/tool-versions.copy POETRY_LOCK = $(CURDIR)/poetry.lock INIT_TIMESTAMP = $(CURDIR)/.timestamp/init.timestamp -SRC_FILES = $(shell find src/api src/etl src/layers src/test_helpers -type f -name "*.py" -not -path "*/feature_tests/*" -not -path "*/test_*" -not -path "*/fhir/r4/strict_models.py" -not -path "*/fhir/r4/models.py" -not -path "*/archived_epr/*") +SRC_FILES = $(shell find src/api src/layers src/test_helpers -type f -name "*.py" -not -path "*/feature_tests/*" -not -path "*/test_*" -not -path "*/fhir/r4/strict_models.py" -not -path "*/fhir/r4/models.py" -not -path "*/archived_epr/*") THIRD_PARTY_DIST = $(CURDIR)/src/layers/third_party/dist SWAGGER_DIST = $(CURDIR)/infrastructure/swagger/dist SWAGGER_PUBLIC = $(SWAGGER_DIST)/public/swagger.yaml SWAGGER_AWS = $(SWAGGER_DIST)/aws/swagger.yaml BUILD_DEPENDENCIES = $(INIT_TIMESTAMP) \ - $(SRC_FILES) \ - $(TOOL_VERSIONS_COPY) \ - $(POETRY_LOCK) \ - $(SWAGGER_PUBLIC) \ - $(SWAGGER_AWS) + $(SRC_FILES) \ + 
$(TOOL_VERSIONS_COPY) \ + $(POETRY_LOCK) \ + $(SWAGGER_PUBLIC) \ + $(SWAGGER_AWS) clean: poetry--clean swagger--clean terraform--clean ## Complete clear-out of the project installation and artifacts diff --git a/src/conftest.py b/src/conftest.py index dddf361e3..714b67b8e 100644 --- a/src/conftest.py +++ b/src/conftest.py @@ -3,7 +3,6 @@ from pathlib import Path import boto3 -from etl_utils.constants import ETL_STATE_LOCK from event.aws.client import dynamodb_client from event.logging.logger import setup_logger from nhs_context_logging.fixtures import ( # noqa: F401 @@ -151,17 +150,6 @@ def clear_dynamodb_table_(request: FixtureRequest): yield -@fixture(autouse=True) -def clear_etl_state_lock_(request: FixtureRequest): - if is_integration(request): - s3_client = boto3.client("s3") - bucket_name = read_terraform_output("sds_etl.value.bucket") - s3_client.delete_object(Bucket=bucket_name, Key=ETL_STATE_LOCK) - yield - else: - yield - - @fixture(autouse=True) def test_data_paths(request: FixtureRequest): """ diff --git a/src/etl/notify/make/make.py b/src/etl/notify/make/make.py deleted file mode 100644 index f33cd7068..000000000 --- a/src/etl/notify/make/make.py +++ /dev/null @@ -1,4 +0,0 @@ -from builder.lambda_build import build - -if __name__ == "__main__": - build(__file__) diff --git a/src/etl/notify/notify.py b/src/etl/notify/notify.py deleted file mode 100644 index ea9298bdf..000000000 --- a/src/etl/notify/notify.py +++ /dev/null @@ -1,29 +0,0 @@ -from event.environment import BaseEnvironment - -from etl.notify.operations import EtlStatus, parse_message, send_notification - - -class NotifyEnvironment(BaseEnvironment): - WORKSPACE: str - ENVIRONMENT: str - SLACK_WEBHOOK_URL: str - - -ENVIRONMENT = NotifyEnvironment.build() - - -def handler(event: list[dict], context=None): - status = EtlStatus.PASS - for response in filter(bool, map(parse_message, event)): - _status = EtlStatus.parse_response(response) - send_notification( - slack_webhook_url=ENVIRONMENT.SLACK_WEBHOOK_URL, - workspace=ENVIRONMENT.WORKSPACE, - environment=ENVIRONMENT.ENVIRONMENT, - headline=response.message, - error_message=response.error_message or "None", - ) - if status is EtlStatus.PASS: - status = _status - - return str(status) diff --git a/src/etl/notify/operations.py b/src/etl/notify/operations.py deleted file mode 100644 index 0c2aa8aab..000000000 --- a/src/etl/notify/operations.py +++ /dev/null @@ -1,41 +0,0 @@ -from enum import StrEnum, auto - -import requests -from etl_utils.trigger.model import TriggerResponse -from etl_utils.worker.model import WorkerResponse - - -class EtlStatus(StrEnum): - PASS = auto() - FAIL = auto() - - @classmethod - def parse_response(cls, response: TriggerResponse): - return cls.PASS if response.error_message is None else cls.FAIL - - -def parse_message(message: dict) -> TriggerResponse: - try: - return TriggerResponse(**message) - except TypeError: - pass - - try: - response = WorkerResponse(**message) - return TriggerResponse( - message=( - f"ETL stage '{response.stage_name}' generated {response.processed_records} " - f"output records with {response.unprocessed_records} input records " - "remaining to be processed" - ), - error_message=response.error_message, - ) - except TypeError: - pass - - return None - - -def send_notification(slack_webhook_url, **data): - response = requests.post(url=slack_webhook_url, json=data) - return response.text diff --git a/src/etl/notify/tests/test_notify_lambda.py b/src/etl/notify/tests/test_notify_lambda.py deleted file mode 100644 index 
b23735d0d..000000000 --- a/src/etl/notify/tests/test_notify_lambda.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -from unittest import mock - -import pytest - -EXAMPLE_DOT_COM = "https://httpbin.org" - -NOTIFY_ENVIRONMENT = { - "SLACK_WEBHOOK_URL": EXAMPLE_DOT_COM, - "ENVIRONMENT": "foo", - "WORKSPACE": "bar", -} - -STATE_MACHINE_INPUT_WITHOUT_ERROR_MESSAGE = [ - { - "ETag": '"xxx"', - "ServerSideEncryption": "AES256", - "VersionId": "VHBhCwygSeYxgEEecU7N.meZl3uKDoaA", - }, - { - "stage_name": "load", - "processed_records": 0, - "unprocessed_records": 0, - "error_message": None, - }, -] - -STATE_MACHINE_INPUT_WITH_ERROR_MESSAGE = [ - { - "ETag": '"xxx"', - "ServerSideEncryption": "AES256", - "VersionId": "VHBhCwygSeYxgEEecU7N.meZl3uKDoaA", - }, - { - "stage_name": "load", - "processed_records": 0, - "unprocessed_records": 0, - "error_message": "oops", - }, -] - - -@pytest.mark.parametrize( - ["input", "output"], - [ - (STATE_MACHINE_INPUT_WITHOUT_ERROR_MESSAGE, "pass"), - (STATE_MACHINE_INPUT_WITH_ERROR_MESSAGE, "fail"), - ], -) -def test_notify_lambda_with_state_machine_input(input, output): - with mock.patch.dict(os.environ, NOTIFY_ENVIRONMENT, clear=True): - from etl.notify import notify - - assert notify.handler(event=input) == output - - -# @pytest.mark.integration -# def test_notify_lambda_without_error_message(): -# notify_lambda_arn = read_terraform_output("sds_etl.value.notify_lambda_arn") - -# lambda_client = boto3.client("lambda") -# response = lambda_client.invoke( -# FunctionName=notify_lambda_arn, -# Payload=json.dumps([{"message": "test"}]).encode(), -# ) -# decoded_payload = response["Payload"].read().decode("utf-8") -# decoded_payload = decoded_payload.strip('"') -# assert decoded_payload == "pass" - - -# @pytest.mark.integration -# def test_notify_lambda_with_error_message(): -# notify_lambda_arn = read_terraform_output("sds_etl.value.notify_lambda_arn") - -# lambda_client = boto3.client("lambda") -# response = lambda_client.invoke( -# FunctionName=notify_lambda_arn, -# Payload=json.dumps( -# [ -# { -# "message": "test", -# "error_message": "this is an error", -# } -# ] -# ).encode(), -# ) -# decoded_payload = response["Payload"].read().decode("utf-8") -# decoded_payload = decoded_payload.strip('"') -# assert decoded_payload == "fail" - - -# @pytest.mark.integration -# def test_notify_lambda_with_worker_response_without_error_message(): -# notify_lambda_arn = read_terraform_output("sds_etl.value.notify_lambda_arn") - -# lambda_client = boto3.client("lambda") -# response = lambda_client.invoke( -# FunctionName=notify_lambda_arn, -# Payload=json.dumps( -# [ -# { -# "stage_name": "test", -# "processed_records": 123, -# "unprocessed_records": 123, -# } -# ] -# ).encode(), -# ) -# decoded_payload = response["Payload"].read().decode("utf-8") -# decoded_payload = decoded_payload.strip('"') -# assert decoded_payload == "pass" - - -# @pytest.mark.integration -# def test_notify_lambda_with_worker_response_with_error_message(): -# notify_lambda_arn = read_terraform_output("sds_etl.value.notify_lambda_arn") - -# lambda_client = boto3.client("lambda") -# response = lambda_client.invoke( -# FunctionName=notify_lambda_arn, -# Payload=json.dumps( -# [ -# { -# "stage_name": "test", -# "processed_records": 123, -# "unprocessed_records": 123, -# "error_message": "this is an error", -# } -# ] -# ).encode(), -# ) -# decoded_payload = response["Payload"].read().decode("utf-8") -# decoded_payload = decoded_payload.strip('"') -# assert decoded_payload == "fail" diff --git 
a/src/etl/notify/tests/test_operations.py b/src/etl/notify/tests/test_operations.py deleted file mode 100644 index 915c65fe9..000000000 --- a/src/etl/notify/tests/test_operations.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest -from etl_utils.trigger.model import TriggerResponse - -from etl.notify.operations import EtlStatus, parse_message, send_notification - -from .test_notify_lambda import EXAMPLE_DOT_COM - - -@pytest.mark.parametrize( - ["error_message", "status"], - [ - (None, EtlStatus.PASS), - ("oops", EtlStatus.FAIL), - ], -) -def test_parse_response(error_message, status): - assert ( - EtlStatus.parse_response( - TriggerResponse(message="", error_message=error_message) - ) - is status - ) - - -@pytest.mark.parametrize( - ["message", "result"], - [ - ( - {"message": "i'm a trigger response", "error_message": "oops"}, - TriggerResponse(message="i'm a trigger response", error_message="oops"), - ), - ( - { - "stage_name": "bulk", - "processed_records": 123, - "unprocessed_records": 321, - "error_message": "whoops", - }, - TriggerResponse( - message=( - "ETL stage 'bulk' generated 123 output records with " - "321 input records remaining to be processed" - ), - error_message="whoops", - ), - ), - ({"foo": "bar"}, None), - ], -) -def test_parse_message(message, result): - assert parse_message(message=message) == result - - -def test_send_notification(): - response = send_notification(EXAMPLE_DOT_COM, foo=[123], bar="abc") - assert len(response) > 0 diff --git a/src/etl/sds/etl_state_lock_enforcer/etl_state_lock_enforcer.py b/src/etl/sds/etl_state_lock_enforcer/etl_state_lock_enforcer.py deleted file mode 100644 index 68c2e717a..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/etl_state_lock_enforcer.py +++ /dev/null @@ -1,57 +0,0 @@ -import boto3 -from etl_utils.trigger.logger import log_action -from etl_utils.trigger.notify import notify -from event.environment import BaseEnvironment -from event.step_chain import StepChain - -from .steps import _process_sqs_message, steps - - -class EtlStateLockEnvironment(BaseEnvironment): - STATE_MACHINE_ARN: str - NOTIFY_LAMBDA_ARN: str - ETL_BUCKET: str - - -S3_CLIENT = boto3.client("s3") -STEP_FUNCTIONS_CLIENT = boto3.client("stepfunctions") -LAMBDA_CLIENT = boto3.client("lambda") -ENVIRONMENT = EtlStateLockEnvironment.build() - - -CACHE = { - "s3_client": S3_CLIENT, - "step_functions_client": STEP_FUNCTIONS_CLIENT, - "state_machine_arn": ENVIRONMENT.STATE_MACHINE_ARN, - "etl_bucket": ENVIRONMENT.ETL_BUCKET, - "manual_retry_state": False, -} - - -def handler(event=dict, context=None): - for message in event["Records"]: - process_message(message) - - return None # Returning None is crucial because it indicates a successful processing of the message to the SQS service. - # If this return value is changed, the SQS service might interpret it as a failure, causing the message to be retried or stuck in the queue. 
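
Note: the `_check_etl_lock` step removed below implements a coarse mutual-exclusion lock as a plain S3 object (the key constant `ETL_STATE_LOCK` from `etl_utils.constants`, probed via the deleted `etl_state_lock_doesnt_exist_in_s3` / `object_exists` helpers). A minimal sketch of that idiom, for reference only — the bucket name, key value, `acquire_lock` function and `owner` argument here are illustrative, not taken from the codebase:

    import boto3
    from botocore.exceptions import ClientError

    BUCKET = "etl-bucket"        # illustrative
    LOCK_KEY = "etl_state_lock"  # illustrative stand-in for ETL_STATE_LOCK

    def acquire_lock(s3, owner: str) -> bool:
        """Create the lock object; return False if it already exists."""
        try:
            s3.head_object(Bucket=BUCKET, Key=LOCK_KEY)
            return False  # another execution holds the lock
        except ClientError as error:
            if error.response["Error"]["Code"] not in ("404", "NoSuchKey"):
                raise
        # Check-then-put is not atomic; in the deleted code, concurrent
        # triggers appear to be serialised upstream by the FIFO queue
        # (note the MessageGroupId in the integration tests below) rather
        # than by S3 itself.
        s3.put_object(Bucket=BUCKET, Key=LOCK_KEY, Body=owner.encode())
        return True

    if __name__ == "__main__":
        locked = acquire_lock(boto3.client("s3"), owner="update.123.124")
        print("acquired" if locked else "already locked")
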
- - -def process_message(message): - step_chain = StepChain(step_chain=steps, step_decorators=[log_action]) - step_chain.run(cache=CACHE, init=message) - - trigger_type = "null" - result = step_chain.data[_process_sqs_message] - if isinstance(result, tuple): - state_machine_input, _ = result - if not isinstance(state_machine_input, Exception): - trigger_type = state_machine_input.etl_type - - response = notify( - lambda_client=LAMBDA_CLIENT, - function_name=ENVIRONMENT.NOTIFY_LAMBDA_ARN, - result=step_chain.result, - trigger_type=trigger_type, - ) - - return response diff --git a/src/etl/sds/etl_state_lock_enforcer/make/make.py b/src/etl/sds/etl_state_lock_enforcer/make/make.py deleted file mode 100644 index f33cd7068..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/make/make.py +++ /dev/null @@ -1,4 +0,0 @@ -from builder.lambda_build import build - -if __name__ == "__main__": - build(__file__) diff --git a/src/etl/sds/etl_state_lock_enforcer/operations.py b/src/etl/sds/etl_state_lock_enforcer/operations.py deleted file mode 100644 index 7d364a000..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/operations.py +++ /dev/null @@ -1,11 +0,0 @@ -from typing import TYPE_CHECKING - -from etl_utils.constants import ETL_STATE_LOCK -from etl_utils.trigger.operations import object_exists - -if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client - - -def etl_state_lock_doesnt_exist_in_s3(s3_client: "S3Client", bucket: str): - return not object_exists(s3_client=s3_client, bucket=bucket, key=ETL_STATE_LOCK) diff --git a/src/etl/sds/etl_state_lock_enforcer/steps.py b/src/etl/sds/etl_state_lock_enforcer/steps.py deleted file mode 100644 index 8c2552ee2..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/steps.py +++ /dev/null @@ -1,126 +0,0 @@ -from typing import TYPE_CHECKING, TypedDict - -from etl_utils.constants import ( - ETL_QUEUE_HISTORY, - ETL_STATE_LOCK, - ETL_STATE_MACHINE_HISTORY, - WorkerKey, -) -from etl_utils.trigger.model import StateMachineInput -from etl_utils.trigger.operations import start_execution, validate_state_keys_are_empty -from event.json import json_loads -from event.step_chain import StepChain - -from .operations import etl_state_lock_doesnt_exist_in_s3 - -if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client - from mypy_boto3_stepfunctions import SFNClient - - -class StateLockExistsError(Exception): - """Custom exception for existing ETL state lock.""" - - pass - - -class Cache(TypedDict): - s3_client: "S3Client" - step_functions_client: "SFNClient" - state_machine_arn: str - etl_bucket: str - etl_extract_input_key: str - manual_retry_state: bool - - -def _process_sqs_message(data, cache: Cache): - message = data[StepChain.INIT] - body = json_loads(message["body"]) - state_machine_name = body["name"] - manual_retry_state = body["manual_retry"] - cache["manual_retry_state"] = manual_retry_state - - # Validate and create StateMachineInput instance - state_machine_input = StateMachineInput(**body) - - return state_machine_input, state_machine_name - - -def _check_etl_lock(data, cache: Cache): - manual_retry_state = cache["manual_retry_state"] - if not manual_retry_state: - _, state_machine_name = data[_process_sqs_message] - s3_client = cache["s3_client"] - etl_bucket = cache["etl_bucket"] - if etl_state_lock_doesnt_exist_in_s3(s3_client=s3_client, bucket=etl_bucket): - # Aquire state lock - return s3_client.put_object( - Bucket=cache["etl_bucket"], - Key=ETL_STATE_LOCK, - Body=state_machine_name, - ) - - else: - raise StateLockExistsError("ETL state lock already 
exists.") - - -def _validate_state_keys_are_empty(data, cache: Cache): - manual_retry_state = cache["manual_retry_state"] - if not manual_retry_state: - return validate_state_keys_are_empty( - s3_client=cache["s3_client"], bucket=cache["etl_bucket"] - ) - - -def _put_to_state_extract(data, cache: Cache): - manual_retry_state = cache["manual_retry_state"] - if not manual_retry_state: - _, state_machine_name = data[_process_sqs_message] - s3_client = cache["s3_client"] - etl_bucket = cache["etl_bucket"] - - return s3_client.copy_object( - Bucket=etl_bucket, - Key=WorkerKey.EXTRACT, - CopySource=f'{cache["etl_bucket"]}/{ETL_STATE_MACHINE_HISTORY}/{state_machine_name}', - ) - - -def _put_to_state_machine_history(data, cache: Cache): - manual_retry_state = cache["manual_retry_state"] - _, state_machine_name = data[_process_sqs_message] - s3_client = cache["s3_client"] - etl_bucket = cache["etl_bucket"] - if not manual_retry_state: - # Update state machine history file - return s3_client.copy_object( - Bucket=etl_bucket, - Key=f"{ETL_STATE_MACHINE_HISTORY}/{state_machine_name}", - CopySource=f"{etl_bucket}/{ETL_QUEUE_HISTORY}/{state_machine_name}", - ) - else: - return s3_client.put_object( - Bucket=etl_bucket, - Key=f"{ETL_STATE_MACHINE_HISTORY}/{state_machine_name}", - Body="retry", - ) - - -def _start_execution(data, cache): - state_machine_input, state_machine_name = data[_process_sqs_message] - return start_execution( - step_functions_client=cache["step_functions_client"], - state_machine_arn=cache["state_machine_arn"], - state_machine_input=state_machine_input, - state_machine_name=state_machine_name, - ) - - -steps = [ - _process_sqs_message, - _check_etl_lock, - _validate_state_keys_are_empty, - _put_to_state_machine_history, - _put_to_state_extract, - _start_execution, -] diff --git a/src/etl/sds/etl_state_lock_enforcer/tests/_test_etl_state_lock_enforcer.py b/src/etl/sds/etl_state_lock_enforcer/tests/_test_etl_state_lock_enforcer.py deleted file mode 100644 index d5ea33bae..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/tests/_test_etl_state_lock_enforcer.py +++ /dev/null @@ -1,492 +0,0 @@ -import json -import os -import time -import uuid -from collections import deque -from functools import partial -from types import FunctionType -from unittest import mock - -import boto3 -import pytest -from botocore.exceptions import ClientError -from etl_utils.constants import ( - CHANGELOG_NUMBER, - ETL_QUEUE_HISTORY, - ETL_STATE_LOCK, - ETL_STATE_MACHINE_HISTORY, - WorkerKey, -) -from etl_utils.io import pkl_dumps_lz4 -from etl_utils.io.test.io_utils import pkl_loads_lz4 -from etl_utils.trigger.model import _create_timestamp -from etl_utils.trigger.operations import StateFileNotEmpty -from moto import mock_aws -from mypy_boto3_s3 import S3Client -from mypy_boto3_stepfunctions import SFNClient - -from etl.sds.etl_state_lock_enforcer.steps import _check_etl_lock, _start_execution -from test_helpers.sample_sqs_messages import ( - INVALID_BODY_JSON_EVENT, - QUEUE_BULK_HISTORY_FILE, - QUEUE_UPDATE_HISTORY_FILE, - STATE_MACHINE_BULK_HISTORY_FILE, - STATE_MACHINE_INPUT_TYPE_UPDATE, - STATE_MACHINE_UPDATE_HISTORY_FILE, - VALID_SQS_BULK_EVENT, - VALID_SQS_UPDATE_EVENT, -) -from test_helpers.terraform import read_terraform_output - -MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT = { - "AWS_DEFAULT_REGION": "us-east-1", - "STATE_MACHINE_ARN": "state-machine", - "NOTIFY_LAMBDA_ARN": "notify-lambda", - "ETL_BUCKET": "etl-bucket", - "SQS_QUEUE_URL": "sqs-queue", -} -ETL_BUCKET = 
MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT["ETL_BUCKET"] - - -@pytest.mark.parametrize( - ("message", "history_file"), - [ - (VALID_SQS_UPDATE_EVENT, QUEUE_UPDATE_HISTORY_FILE), - (VALID_SQS_BULK_EVENT, QUEUE_BULK_HISTORY_FILE), - ], - indirect=False, -) -def test_etl_state_lock_enforcer_state_lock_does_not_exist(message, history_file): - with mock_aws(), mock.patch.dict( - os.environ, MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT, clear=True - ), mock.patch("etl_utils.trigger.model.datetime") as mocked_datetime: - mocked_datetime.datetime.now().isoformat.return_value = "foo" - s3_client = boto3.client("s3") - - # Create history file - s3_client.create_bucket(Bucket=ETL_BUCKET) - s3_client.put_object( - Bucket=ETL_BUCKET, - Key=f"{history_file}", - Body="test", - ) - - from etl.sds.etl_state_lock_enforcer import etl_state_lock_enforcer - - # Mock the cache contents - etl_state_lock_enforcer.CACHE["s3_client"] = s3_client - - # Remove start execution, since it's meaningless for unit tests - if _start_execution in etl_state_lock_enforcer.steps: - idx = etl_state_lock_enforcer.steps.index(_start_execution) - etl_state_lock_enforcer.steps.pop(idx) - - # Don't execute the notify lambda - etl_state_lock_enforcer.notify = ( - lambda lambda_client, function_name, result, trigger_type: result - ) - - # Execute etl_state_lock_enforcer lambda - response = etl_state_lock_enforcer.handler(event=message) - - # Assert state_lock_file created - etl_state_lock_file = s3_client.get_object( - Bucket=ETL_BUCKET, Key=ETL_STATE_LOCK - ) - assert etl_state_lock_file - - # Assert changes put to state machine - state_machine_input = s3_client.get_object( - Bucket=ETL_BUCKET, - Key="input--extract/unprocessed", - ) - assert state_machine_input - - -@pytest.mark.parametrize( - ("message", "queue_history_file", "state_machine_history_file"), - [ - ( - VALID_SQS_UPDATE_EVENT, - QUEUE_UPDATE_HISTORY_FILE, - STATE_MACHINE_UPDATE_HISTORY_FILE, - ), - ( - VALID_SQS_BULK_EVENT, - QUEUE_BULK_HISTORY_FILE, - STATE_MACHINE_BULK_HISTORY_FILE, - ), - ], - indirect=False, -) -def test_etl_state_lock_enforcer_state_lock_exist( - message, queue_history_file, state_machine_history_file -): - with mock_aws(), mock.patch.dict( - os.environ, MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT, clear=True - ), mock.patch("etl_utils.trigger.model.datetime") as mocked_datetime: - mocked_datetime.datetime.now().isoformat.return_value = "foo" - s3_client = boto3.client("s3") - - # Create intermediate history file & state lock - s3_client.create_bucket(Bucket=ETL_BUCKET) - s3_client.put_object( - Bucket=ETL_BUCKET, - Key=f"{queue_history_file}", - Body="test-changes", - ) - - s3_client.put_object( - Bucket=ETL_BUCKET, - Key=ETL_STATE_LOCK, - Body="test-lock", - ) - - from etl.sds.etl_state_lock_enforcer import etl_state_lock_enforcer - - # Mock the cache contents - etl_state_lock_enforcer.CACHE["s3_client"] = s3_client - - # Don't execute the notify lambda - etl_state_lock_enforcer.notify = ( - lambda lambda_client, function_name, result, trigger_type: result - ) - - # Execute etl_state_lock_enforcer lambda - etl_state_lock_enforcer.handler(event=message) - - # Assert history file deleted - with pytest.raises(ClientError): - s3_client.get_object(Bucket=ETL_BUCKET, Key=f"{state_machine_history_file}") - - # Assert state lock still active - s3_client.get_object(Bucket=ETL_BUCKET, Key=ETL_STATE_LOCK) - - -# test process message -@pytest.mark.parametrize( - "message", - [VALID_SQS_UPDATE_EVENT, VALID_SQS_BULK_EVENT], -) -def 
test_etl_state_lock_enforcer_failure_state_file_not_empty(message): - with mock_aws(), mock.patch.dict( - os.environ, MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT, clear=True - ), mock.patch("etl_utils.trigger.model.datetime") as mocked_datetime: - mocked_datetime.datetime.now().isoformat.return_value = "foo" - s3_client = boto3.client("s3") - - # Create state file contents - s3_client.create_bucket(Bucket=ETL_BUCKET) - s3_client.put_object( - Bucket=ETL_BUCKET, - Key="input--extract/unprocessed", - Body="test", - ) - s3_client.put_object( - Bucket=ETL_BUCKET, - Key="input--transform/unprocessed", - Body="test", - ) - s3_client.put_object( - Bucket=ETL_BUCKET, - Key="input--load/unprocessed", - Body="test", - ) - - from etl.sds.etl_state_lock_enforcer import etl_state_lock_enforcer - - # Mock the cache contents - etl_state_lock_enforcer.CACHE["s3_client"] = s3_client - - # Don't execute the notify lambda - etl_state_lock_enforcer.notify = ( - lambda lambda_client, function_name, result, trigger_type: result - ) - - # Remove _check_etl_lock, as we want to test the step afterwards - if _check_etl_lock in etl_state_lock_enforcer.steps: - idx = etl_state_lock_enforcer.steps.index(_check_etl_lock) - etl_state_lock_enforcer.steps.pop(idx) - - # Execute etl_state_lock_enforcer lambda - result = etl_state_lock_enforcer.process_message(message=message["Records"][0]) - - assert isinstance(result, StateFileNotEmpty) - - -@pytest.mark.parametrize( - "message", - [INVALID_BODY_JSON_EVENT], -) -def test_etl_state_lock_enforcer_failure_invalid_json_message(message): - with mock_aws(), mock.patch.dict( - os.environ, MOCKED_ETL_STATE_LOCK_ENFORCER_ENVIRONMENT, clear=True - ), mock.patch("etl_utils.trigger.model.datetime") as mocked_datetime: - mocked_datetime.datetime.now().isoformat.return_value = "foo" - s3_client = boto3.client("s3") - - from etl.sds.etl_state_lock_enforcer import etl_state_lock_enforcer - - # Mock the cache contents - etl_state_lock_enforcer.CACHE["s3_client"] = s3_client - - # Don't execute the notify lambda - etl_state_lock_enforcer.notify = ( - lambda lambda_client, function_name, result, trigger_type: result - ) - - # Execute etl_state_lock_enforcer lambda - result = etl_state_lock_enforcer.process_message(message=message["Records"][0]) - - assert isinstance(result, json.decoder.JSONDecodeError) - - -# Integration test -UPDATE_CHANGELOG_NUMBER_START = 123 -UPDATE_CHANGELOG_NUMBER_END = 124 -EMPTY_LDIF_DATA = b"" -EMPTY_JSON_DATA = deque() -ALLOWED_EXCEPTIONS = (json.JSONDecodeError,) - - -def _ask_s3(s3_client: S3Client, bucket: str, key: str, question: FunctionType = None): - result = True - try: - response = s3_client.get_object(Bucket=bucket, Key=key) - except ClientError: - result = False - - if result and question is not None: - data = response["Body"].read() - try: - result = question(data) - except ALLOWED_EXCEPTIONS: - result = False - return result - - -def _ask_step_functions( - step_functions_client: SFNClient, - execution_arn: str, - question: FunctionType = None, -): - status = "" - try: - execution_state = step_functions_client.describe_execution( - executionArn=execution_arn, - ) - status = execution_state["status"] - except ClientError: - status = "FAILED" - - if status and question is not None: - status = question(status) - return status - - -@pytest.mark.timeout(30) -@pytest.mark.integration -def test_etl_state_lock_enforcer_trigger_update_success(): - # Where the state is located - etl_bucket = read_terraform_output("sds_etl.value.bucket") - sqs_queue_url = 
read_terraform_output( - "sds_etl.value.etl_state_lock_enforcer.sqs_queue_url" - ) - state_machine_arn = read_terraform_output("sds_etl.value.state_machine_arn") - timestamp = _create_timestamp().replace(":", ".") - intermediate_queue_history_file = f"{ETL_QUEUE_HISTORY}/{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}" - state_machine_history_file = f"{ETL_STATE_MACHINE_HISTORY}/{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}" - execution_arn = f"{state_machine_arn}:{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}".replace( - "stateMachine", "execution" - ) - - # Set some questions - s3_client = boto3.client("s3") - ask_s3 = partial(_ask_s3, s3_client=s3_client, bucket=etl_bucket) - step_functions_client = boto3.client("stepfunctions") - ask_step_functions = partial( - _ask_step_functions, - step_functions_client=step_functions_client, - ) - - was_changelog_number_updated = lambda: ask_s3( - key=CHANGELOG_NUMBER, - question=lambda x: str(UPDATE_CHANGELOG_NUMBER_END).encode("utf-8"), - ) - was_state_machine_history_file_created = lambda: ask_s3( - key=state_machine_history_file - ) - was_state_machine_successful = ( - lambda: ask_s3( - key=WorkerKey.EXTRACT, - question=lambda x: x == b"", - ) - and ask_s3( - key=WorkerKey.TRANSFORM, - question=lambda x: pkl_loads_lz4(x) == deque(), - ) - and ask_s3( - key=WorkerKey.LOAD, - question=lambda x: pkl_loads_lz4(x) == deque(), - ) - and ask_step_functions( - execution_arn=execution_arn, question=lambda x: x == "SUCCEEDED" - ) - ) - was_etl_state_lock_removed = lambda: not ask_s3(key=ETL_STATE_LOCK) - - # Clear/set the initial state - s3_client.put_object(Bucket=etl_bucket, Key=WorkerKey.EXTRACT, Body=EMPTY_LDIF_DATA) - s3_client.put_object( - Bucket=etl_bucket, Key=WorkerKey.TRANSFORM, Body=pkl_dumps_lz4(EMPTY_JSON_DATA) - ) - s3_client.put_object( - Bucket=etl_bucket, Key=WorkerKey.LOAD, Body=pkl_dumps_lz4(EMPTY_JSON_DATA) - ) - s3_client.put_object( - Bucket=etl_bucket, - Key=CHANGELOG_NUMBER, - Body=str(UPDATE_CHANGELOG_NUMBER_START).encode("utf-8"), - ) - s3_client.put_object( - Bucket=etl_bucket, Key=intermediate_queue_history_file, Body=b"" - ) - s3_client.delete_object(Bucket=etl_bucket, Key=ETL_STATE_LOCK) - - # Trigger the etl_state_lock_enforcer lambda by sending message to queue - sqs_client = boto3.client("sqs") - sqs_client.send_message( - QueueUrl=f"{sqs_queue_url}", - MessageBody=json.dumps( - { - "changelog_number_start": UPDATE_CHANGELOG_NUMBER_START, - "changelog_number_end": UPDATE_CHANGELOG_NUMBER_END, - "etl_type": STATE_MACHINE_INPUT_TYPE_UPDATE, - "timestamp": f"{timestamp}", - "name": f"{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}", - "manual_retry": False, - } - ), - MessageDeduplicationId=str(uuid.uuid4()), - MessageGroupId="state_machine_group", - ) - - changelog_number_updated = False - state_machine_history_file_created = False - state_machine_successful = False - etl_state_lock_removed = False - while not all( - ( - changelog_number_updated, - state_machine_history_file_created, - state_machine_successful, - etl_state_lock_removed, - ) - ): - time.sleep(5) - changelog_number_updated = ( - changelog_number_updated or was_changelog_number_updated() - ) - state_machine_history_file_created = ( - state_machine_history_file_created - or was_state_machine_history_file_created() - ) - 
state_machine_successful = ( - state_machine_successful or was_state_machine_successful() - ) - etl_state_lock_removed = etl_state_lock_removed or was_etl_state_lock_removed() - - # Confirm the final state - assert changelog_number_updated - assert state_machine_history_file_created - assert state_machine_successful - assert etl_state_lock_removed - - -@pytest.mark.timeout(30) -@pytest.mark.integration -def test_etl_state_lock_enforcer_trigger_update_rejected(): - # Where the state is located - etl_bucket = read_terraform_output("sds_etl.value.bucket") - sqs_queue_url = read_terraform_output( - "sds_etl.value.etl_state_lock_enforcer.sqs_queue_url" - ) - timestamp = _create_timestamp().replace(":", ".") - intermediate_queue_history_file = f"{ETL_QUEUE_HISTORY}/{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}" - state_machine_history_file = f"{ETL_STATE_MACHINE_HISTORY}/{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}" - - # Set some questions - s3_client = boto3.client("s3") - ask_s3 = partial(_ask_s3, s3_client=s3_client, bucket=etl_bucket) - - was_changelog_number_not_updated = lambda: ask_s3( - key=CHANGELOG_NUMBER, - question=lambda x: str(UPDATE_CHANGELOG_NUMBER_START).encode("utf-8"), - ) - was_etl_state_lock_not_removed = lambda: ask_s3(key=ETL_STATE_LOCK) - was_state_machine_history_file_not_created = lambda: not ask_s3( - key=state_machine_history_file - ) - - # Clear/set the initial state - s3_client.put_object(Bucket=etl_bucket, Key=WorkerKey.EXTRACT, Body=EMPTY_LDIF_DATA) - s3_client.put_object( - Bucket=etl_bucket, Key=WorkerKey.TRANSFORM, Body=pkl_dumps_lz4(EMPTY_JSON_DATA) - ) - s3_client.put_object( - Bucket=etl_bucket, Key=WorkerKey.LOAD, Body=pkl_dumps_lz4(EMPTY_JSON_DATA) - ) - s3_client.put_object( - Bucket=etl_bucket, - Key=CHANGELOG_NUMBER, - Body=str(UPDATE_CHANGELOG_NUMBER_START).encode("utf-8"), - ) - s3_client.put_object( - Bucket=etl_bucket, Key=intermediate_queue_history_file, Body=b"" - ) - s3_client.put_object(Bucket=etl_bucket, Key=ETL_STATE_LOCK, Body="locked") - - # Trigger the etl_state_lock_enforcer lambda by sending message to queue - sqs_client = boto3.client("sqs") - sqs_client.send_message( - QueueUrl=f"{sqs_queue_url}", - MessageBody=json.dumps( - { - "changelog_number_start": UPDATE_CHANGELOG_NUMBER_START, - "changelog_number_end": UPDATE_CHANGELOG_NUMBER_END, - "etl_type": STATE_MACHINE_INPUT_TYPE_UPDATE, - "timestamp": f"{timestamp}", - "name": f"{STATE_MACHINE_INPUT_TYPE_UPDATE}.{UPDATE_CHANGELOG_NUMBER_START}.{UPDATE_CHANGELOG_NUMBER_END}.{timestamp}", - "manual_retry": False, - } - ), - MessageDeduplicationId=str(uuid.uuid4()), - MessageGroupId="state_machine_group", - ) - - changelog_number_not_updated = False - etl_state_lock_not_removed = False - state_machine_history_file_not_created = False - while not all( - ( - changelog_number_not_updated, - etl_state_lock_not_removed, - state_machine_history_file_not_created, - ) - ): - time.sleep(5) - changelog_number_not_updated = ( - changelog_number_not_updated or was_changelog_number_not_updated() - ) - etl_state_lock_not_removed = ( - etl_state_lock_not_removed or was_etl_state_lock_not_removed() - ) - state_machine_history_file_not_created = ( - state_machine_history_file_not_created - or was_state_machine_history_file_not_created() - ) - - # Confirm the final state - assert changelog_number_not_updated - assert etl_state_lock_not_removed - assert 
state_machine_history_file_not_created diff --git a/src/etl/sds/etl_state_lock_enforcer/tests/test_etl_state_lock_enforcer_operations.py b/src/etl/sds/etl_state_lock_enforcer/tests/test_etl_state_lock_enforcer_operations.py deleted file mode 100644 index bda329535..000000000 --- a/src/etl/sds/etl_state_lock_enforcer/tests/test_etl_state_lock_enforcer_operations.py +++ /dev/null @@ -1,40 +0,0 @@ -import os -from typing import TYPE_CHECKING -from unittest import mock - -import boto3 -import pytest -from etl_utils.constants import ETL_STATE_LOCK -from moto import mock_aws - -from etl.sds.etl_state_lock_enforcer.operations import etl_state_lock_doesnt_exist_in_s3 - -if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client - -BUCKET_NAME = "test-bucket" - - -@pytest.fixture -def s3_client(): - with mock_aws(), mock.patch.dict( - os.environ, {"AWS_DEFAULT_REGION": "us-east-1"}, clear=True - ): - s3_client = boto3.client("s3") - s3_client.create_bucket(Bucket=BUCKET_NAME) - yield s3_client - - -def test_etl_state_lock_doesnt_exist_in_s3_when_lock_exists(s3_client: "S3Client"): - s3_client.put_object(Bucket=BUCKET_NAME, Key=ETL_STATE_LOCK, Body="state_lock") - - # Assert that the function returns False (indicating the lock exists) - assert not etl_state_lock_doesnt_exist_in_s3( - s3_client=s3_client, bucket=BUCKET_NAME - ) - - -def test_etl_state_lock_doesnt_exist_in_s3_when_lock_does_not_exist( - s3_client: "S3Client", -): - assert etl_state_lock_doesnt_exist_in_s3(s3_client=s3_client, bucket=BUCKET_NAME) diff --git a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProduct.ldif b/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProduct.ldif deleted file mode 100644 index 98a9437ea..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProduct.ldif +++ /dev/null @@ -1,18 +0,0 @@ -dn: uniqueIdentifier=000000000002,ou=Services,o=nhs -changeType: add -objectClass: nhsAs -objectClass: top -nhsApproverURP: uniqueIdentifier=102583034545,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsAsClient: C3O9X -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK14 -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK15 -nhsDateApproved: 20091016133823 -nhsDateRequested: 20091016133757 -nhsIDCode: C3O9X -nhsMHSPartyKey: C3O9X-806782 -nhsProductKey: 6216 -nhsProductName: TPP SystmOne -nhsRequestorURP: uniqueIdentifier=203171972540,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsTempUid: 10312 -uniqueIdentifier: 000000000002 -nhsMhsManufacturerOrg: C3O9X diff --git a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductAsMhs.ldif b/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductAsMhs.ldif deleted file mode 100644 index 0ea613d2e..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductAsMhs.ldif +++ /dev/null @@ -1,18 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -changeType: add -objectClass: nhsAs -objectClass: top -nhsApproverURP: uniqueIdentifier=102583034545,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsAsClient: C3O9X -nhsAsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK14 -nhsDateApproved: 20091016133823 -nhsDateRequested: 20091016133757 -nhsIDCode: C3O9X -nhsMHSPartyKey: C3O9X-823610 -nhsProductKey: 11929 -nhsProductName: TPP SystmOne -nhsRequestorURP: 
uniqueIdentifier=203171972540,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsTempUid: 10312 -uniqueIdentifier: 000000000001 -nhsMhsManufacturerOrg: C3O9X diff --git a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductTeam.DifferentProduct.ldif b/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductTeam.DifferentProduct.ldif deleted file mode 100644 index 1afb79ad6..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.SameProductTeam.DifferentProduct.ldif +++ /dev/null @@ -1,17 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -changeType: add -objectClass: nhsAs -objectClass: top -nhsApproverURP: uniqueIdentifier=102583034545,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsAsClient: AAA -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK15 -nhsDateApproved: 20091016133823 -nhsDateRequested: 20091016133757 -nhsIDCode: AAA -nhsMHSPartyKey: AAA-806782 -nhsProductKey: 6216 -nhsProductName: TPP SystmOne -nhsRequestorURP: uniqueIdentifier=203171972540,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsTempUid: 10312 -uniqueIdentifier: 000000000002 -nhsMhsManufacturerOrg: AAA diff --git a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.ldif b/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.ldif deleted file mode 100644 index 90eae5a21..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system.ldif +++ /dev/null @@ -1,17 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -changeType: add -objectClass: nhsAs -objectClass: top -nhsApproverURP: uniqueIdentifier=102583034545,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsAsClient: C3O9X -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK14 -nhsDateApproved: 20091016133823 -nhsDateRequested: 20091016133757 -nhsIDCode: C3O9X -nhsMHSPartyKey: C3O9X-806782 -nhsProductKey: 6216 -nhsProductName: TPP SystmOne -nhsRequestorURP: uniqueIdentifier=203171972540,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsTempUid: 10312 -uniqueIdentifier: 000000000001 -nhsMhsManufacturerOrg: C3O9X diff --git a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system_with_acf.ldif b/src/etl/sds/tests/changelog/changelog_components/add/accredited_system_with_acf.ldif deleted file mode 100644 index f07c7a9fa..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/accredited_system_with_acf.ldif +++ /dev/null @@ -1,18 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -changeType: add -objectClass: nhsAs -objectClass: top -nhsApproverURP: uniqueIdentifier=102583034545,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsAsClient: C3O9X -nhsAsSvcIA: urn:nhs:names:services:pdsquery:QUQI_IN010000UK14 -nhsDateApproved: 20091016133823 -nhsDateRequested: 20091016133757 -nhsIDCode: C3O9X -nhsMHSPartyKey: C3O9X-806782 -nhsProductKey: 6216 -nhsProductName: TPP SystmOne -nhsRequestorURP: uniqueIdentifier=203171972540,uniqueIdentifier=352307522545,uid=432776896545,ou=People,o=nhs -nhsTempUid: 10312 -uniqueIdentifier: 000000000001 -nhsMhsManufacturerOrg: C3O9X -nhsAsACF: AS ACF diff --git a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.AnotherWithDifferentUniqueIdentifier.ldif b/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.AnotherWithDifferentUniqueIdentifier.ldif deleted file mode 
100644 index 2a35ba781..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.AnotherWithDifferentUniqueIdentifier.ldif +++ /dev/null @@ -1,25 +0,0 @@ -dn: uniqueIdentifier=123456,ou=Services,o=nhs -changeType: add -objectClass: nhsMhs -objectClass: top -nhsApproverURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsContractPropertyTemplateKey: 14 -nhsDateApproved: 20231030092939 -nhsDateDNSApproved: 20231030092939 -nhsDateRequested: 20231030092906 -nhsDNSApprover: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsEPInteractionType: FHIR -nhsIDCode: C3O9X -nhsMhsCPAId: 123456 -nhsMHSEndPoint: https://test.C3O9X.nhs.uk/ -nhsMhsFQDN: test.C3O9X.nhs.uk -nhsMHsIN: READ_PRACTITIONER_ROLE_R4_V002 -nhsMHSIsAuthenticated: none -nhsMHSPartyKey: C3O9X-823610 -nhsMHsSN: urn:nhs:names:services:ers -nhsMhsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V002 -nhsProductKey: 11929 -nhsProductVersion: Mar2023 -nhsRequestorURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsMhsManufacturerOrg: C3O9X -uniqueIdentifier: 123456 diff --git a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProduct.ldif b/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProduct.ldif deleted file mode 100644 index 7a7845198..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProduct.ldif +++ /dev/null @@ -1,24 +0,0 @@ -dn: uniqueIdentifier=00000a84594b2ef34279,ou=Services,o=nhs -changeType: add -objectClass: nhsMhs -objectClass: top -nhsApproverURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsContractPropertyTemplateKey: 14 -nhsDateApproved: 20231030092939 -nhsDateDNSApproved: 20231030092939 -nhsDateRequested: 20231030092906 -nhsDNSApprover: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsEPInteractionType: FHIR -nhsIDCode: C3O9X -nhsMhsCPAId: 123456 -nhsMHSEndPoint: https://test.C3O9X.nhs.uk/ -nhsMhsFQDN: test.C3O9X.nhs.uk -nhsMHsIN: READ_PRACTITIONER_ROLE_R4_V002 -nhsMHSIsAuthenticated: none -nhsMHSPartyKey: C3O9X-823610 -nhsMHsSN: urn:nhs:names:services:ers -nhsMhsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V002 -nhsProductKey: 11929 -nhsRequestorURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsMhsManufacturerOrg: C3O9X -uniqueIdentifier: 123456 diff --git a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProductTeam.DifferentProduct.ldif b/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProductTeam.DifferentProduct.ldif deleted file mode 100644 index 5fa3a9d09..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.SameProductTeam.DifferentProduct.ldif +++ /dev/null @@ -1,24 +0,0 @@ -dn: uniqueIdentifier=00000a84594b2ef34279,ou=Services,o=nhs -changeType: add -objectClass: nhsMhs -objectClass: top -nhsApproverURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsContractPropertyTemplateKey: 14 -nhsDateApproved: 20231030092939 -nhsDateDNSApproved: 20231030092939 -nhsDateRequested: 20231030092906 -nhsDNSApprover: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, 
o=nhs -nhsEPInteractionType: FHIR -nhsIDCode: AAA -nhsMhsCPAId: 123456 -nhsMHSEndPoint: https://test.AAA.nhs.uk/ -nhsMhsFQDN: test.AAA.nhs.uk -nhsMHsIN: READ_PRACTITIONER_ROLE_R4_V002 -nhsMHSIsAuthenticated: none -nhsMHSPartyKey: AAA-823610 -nhsMHsSN: urn:nhs:names:services:ers -nhsMhsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V002 -nhsProductKey: 11929 -nhsRequestorURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsMhsManufacturerOrg: C3O9X -uniqueIdentifier: 123456 diff --git a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.WithProductName.ldif b/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.WithProductName.ldif deleted file mode 100644 index 7ccbb232c..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.WithProductName.ldif +++ /dev/null @@ -1,25 +0,0 @@ -dn: uniqueIdentifier=00000a84594b2ef34279,ou=Services,o=nhs -changeType: add -objectClass: nhsMhs -objectClass: top -nhsApproverURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsContractPropertyTemplateKey: 14 -nhsDateApproved: 20231030092939 -nhsDateDNSApproved: 20231030092939 -nhsDateRequested: 20231030092906 -nhsDNSApprover: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsEPInteractionType: FHIR -nhsIDCode: C3O9X -nhsMhsCPAId: 00000a84594b2ef34279 -nhsMHSEndPoint: https://test.C3O9X.nhs.uk/ -nhsMhsFQDN: test.C3O9X.nhs.uk -nhsMHsIN: READ_PRACTITIONER_ROLE_R4_V001 -nhsMHSIsAuthenticated: none -nhsMHSPartyKey: C3O9X-823610 -nhsMHsSN: urn:nhs:names:services:ers -nhsMhsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 -nhsProductKey: 11929 -nhsRequestorURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsMhsManufacturerOrg: C3O9X -nhsProductName: My SPINE Product -uniqueIdentifier: 00000a84594b2ef34279 diff --git a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.ldif b/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.ldif deleted file mode 100644 index e0348b606..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/add/message_handling_system.ldif +++ /dev/null @@ -1,24 +0,0 @@ -dn: uniqueIdentifier=00000a84594b2ef34279,ou=Services,o=nhs -changeType: add -objectClass: nhsMhs -objectClass: top -nhsApproverURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsContractPropertyTemplateKey: 14 -nhsDateApproved: 20231030092939 -nhsDateDNSApproved: 20231030092939 -nhsDateRequested: 20231030092906 -nhsDNSApprover: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsEPInteractionType: FHIR -nhsIDCode: C3O9X -nhsMhsCPAId: 00000a84594b2ef34279 -nhsMHSEndPoint: https://test.C3O9X.nhs.uk/ -nhsMhsFQDN: test.C3O9X.nhs.uk -nhsMHsIN: READ_PRACTITIONER_ROLE_R4_V001 -nhsMHSIsAuthenticated: none -nhsMHSPartyKey: C3O9X-823610 -nhsMHsSN: urn:nhs:names:services:ers -nhsMhsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 -nhsProductKey: 11929 -nhsRequestorURP: uniqueidentifier=555050304105,uniqueidentifier=555008548101,uid=555008545108,ou=people, o=nhs -nhsMhsManufacturerOrg: C3O9X -uniqueIdentifier: 00000a84594b2ef34279 diff --git a/src/etl/sds/tests/changelog/changelog_components/delete/accredited_system.ldif 
b/src/etl/sds/tests/changelog/changelog_components/delete/accredited_system.ldif deleted file mode 100644 index 1b410a388..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/delete/accredited_system.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -objectClass: delete -objectClass: top -changeType: delete -uniqueIdentifier: 000000000001 diff --git a/src/etl/sds/tests/changelog/changelog_components/delete/message_handling_system.ldif b/src/etl/sds/tests/changelog/changelog_components/delete/message_handling_system.ldif deleted file mode 100644 index dcae648e2..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/delete/message_handling_system.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: uniqueIdentifier=00000a84594b2ef34279,ou=Services,o=nhs -objectClass: delete -objectClass: top -changeType: delete -uniqueIdentifier: 00000a84594b2ef34279 diff --git a/src/etl/sds/tests/changelog/changelog_components/delete/unknown_entity.ldif b/src/etl/sds/tests/changelog/changelog_components/delete/unknown_entity.ldif deleted file mode 100644 index aa14cffbe..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/delete/unknown_entity.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: uniqueIdentifier=1234,ou=Services,o=nhs -objectClass: delete -objectClass: top -changeType: delete -uniqueIdentifier: 1234 diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/description.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/description.ldif deleted file mode 100644 index 52ea6c131..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/description.ldif +++ /dev/null @@ -1,2 +0,0 @@ -add: description -description: description... 
diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_category_bag.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_category_bag.ldif deleted file mode 100644 index 59bbe4387..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_category_bag.ldif +++ /dev/null @@ -1,2 +0,0 @@ -add: nhsAsCategoryBag -nhsAsCategoryBag: Cat bag example diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_client.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_client.ldif deleted file mode 100644 index a32d6ae5b..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_client.ldif +++ /dev/null @@ -1,2 +0,0 @@ -add: nhsAsClient -nhsAsClient: nhs_as_client_2 diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Duplicate.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Duplicate.ldif deleted file mode 100644 index 46a5efd70..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Duplicate.ldif +++ /dev/null @@ -1,2 +0,0 @@ -add: nhsAsSvcIA -nhsAsSvcIA: urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Multiple.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Multiple.ldif deleted file mode 100644 index b440eb6a6..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.Multiple.ldif +++ /dev/null @@ -1,3 +0,0 @@ -add: nhsAsSvcIA -nhsAsSvcIA: urn:nhs:names:services:pdsquery:123456 -nhsAsSvcIA: new-interaction-id-2 diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.ldif deleted file mode 100644 index a492cb974..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/add/nhs_as_svc_ia.ldif +++ /dev/null @@ -1,2 +0,0 @@ -add: nhsAsSvcIA -nhsAsSvcIA: urn:nhs:names:services:pdsquery:123456 diff --git a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/base.ldif b/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/base.ldif deleted file mode 100644 index ea3859b75..000000000 --- a/src/etl/sds/tests/changelog/changelog_components/modify/accredited_system/base.ldif +++ /dev/null @@ -1,6 +0,0 @@ -dn: uniqueIdentifier=000000000001,ou=Services,o=nhs -changeType: modify -<