Skip to content

Commit 733770c

Browse files
committed
Fixed batch
1 parent b96ab5e commit 733770c

File tree

4 files changed

+14
-2
lines changed

4 files changed

+14
-2
lines changed

infra/s3_source_bucket.tf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
locals {
2-
bucket_name = local.immunisation_account_id == "084828561157" ? "immunisation-batch-${local.account}-preprod-data-sources" : "immunisation-batch-${local.account}-data-sources"
2+
bucket_name = local.immunisation_account_id == "084828561157" ? "immunisation-batch-preprod-${local.account}-data-sources" : "immunisation-batch-${local.account}-data-sources"
33
}
44

55
# Overall entry point into batch in prod. Files are forwarded into the appropriate blue / green bucket.

terraform/Makefile

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,3 +60,7 @@ catch-all-zip:
6060

6161
tf-%:
6262
$(tf_cmd) $*
63+
64+
import:
65+
$(tf_cmd) import $(tf_vars) $(to) $(id)
66+

terraform/file_name_processor.tf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -291,7 +291,7 @@ resource "aws_lambda_function" "file_processor_lambda" {
291291
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
292292
AUDIT_TABLE_NAME = aws_dynamodb_table.audit-table.name
293293
FILE_NAME_GSI = "filename_index"
294-
FILE_NAME_PROC_LAMBDA_NAME = "imms-${local.env}-filenameproc_lambda"
294+
FILE_NAME_PROC_LAMBDA_NAME = "${local.short_prefix}-filenameproc_lambda"
295295

296296
}
297297
}

terraform/s3_config.tf

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,12 +92,14 @@ resource "aws_s3_bucket_lifecycle_configuration" "datasources_lifecycle" {
9292
}
9393
}
9494

95+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
9596
resource "aws_s3_bucket" "batch_data_destination_bucket" {
9697
# Deliberately not using `local.batch_prefix` as we don't want separate blue / green destinations in prod.
9798
bucket = "immunisation-batch-${local.unique_name}-data-destinations"
9899
force_destroy = local.is_temp
99100
}
100101

102+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
101103
resource "aws_s3_bucket_public_access_block" "batch_data_destination_bucket_public_access_block" {
102104
bucket = aws_s3_bucket.batch_data_destination_bucket.id
103105

@@ -107,6 +109,7 @@ resource "aws_s3_bucket_public_access_block" "batch_data_destination_bucket_publ
107109
restrict_public_buckets = true
108110
}
109111

112+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
110113
resource "aws_s3_bucket_policy" "batch_data_destination_bucket_policy" {
111114
bucket = aws_s3_bucket.batch_data_destination_bucket.id
112115
policy = jsonencode({
@@ -151,6 +154,7 @@ resource "aws_s3_bucket_policy" "batch_data_destination_bucket_policy" {
151154
})
152155
}
153156

157+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
154158
resource "aws_s3_bucket_server_side_encryption_configuration" "s3_batch_destination_encryption" {
155159
bucket = aws_s3_bucket.batch_data_destination_bucket.id
156160

@@ -162,6 +166,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "s3_batch_destinat
162166
}
163167
}
164168

169+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
165170
resource "aws_s3_bucket_lifecycle_configuration" "data_destinations" {
166171
bucket = aws_s3_bucket.batch_data_destination_bucket.id
167172

@@ -192,13 +197,15 @@ resource "aws_s3_bucket_lifecycle_configuration" "data_destinations" {
192197
}
193198
}
194199

200+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
195201
resource "aws_s3_bucket" "batch_config_bucket" {
196202
# For now, only create in internal-dev and prod as we only have one shared Redis instance per account.
197203
count = local.create_config_bucket ? 1 : 0
198204

199205
bucket = "imms-${local.unique_name}-supplier-config"
200206
}
201207

208+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
202209
resource "aws_s3_bucket_public_access_block" "batch_config_bucket_public_access_block" {
203210
count = local.create_config_bucket ? 1 : 0
204211

@@ -210,6 +217,7 @@ resource "aws_s3_bucket_public_access_block" "batch_config_bucket_public_access_
210217
restrict_public_buckets = true
211218
}
212219

220+
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
213221
resource "aws_s3_bucket_policy" "batch_config_bucket_policy" {
214222
count = local.create_config_bucket ? 1 : 0
215223

0 commit comments

Comments
 (0)