@@ -92,12 +92,14 @@ resource "aws_s3_bucket_lifecycle_configuration" "datasources_lifecycle" {
9292 }
9393}
9494
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
resource "aws_s3_bucket" "batch_data_destination_bucket" {
  # Deliberately not using `local.batch_prefix` as we don't want separate blue / green destinations in prod.
  bucket = "immunisation-batch-${local.unique_name}-data-destinations"

  # force_destroy only in temporary (short-lived) environments so real data is never auto-deleted.
  force_destroy = local.is_temp
}
100101
102+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
101103resource "aws_s3_bucket_public_access_block" "batch_data_destination_bucket_public_access_block" {
102104 bucket = aws_s3_bucket. batch_data_destination_bucket . id
103105
@@ -107,6 +109,7 @@ resource "aws_s3_bucket_public_access_block" "batch_data_destination_bucket_publ
107109 restrict_public_buckets = true
108110}
109111
112+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
110113resource "aws_s3_bucket_policy" "batch_data_destination_bucket_policy" {
111114 bucket = aws_s3_bucket. batch_data_destination_bucket . id
112115 policy = jsonencode ({
@@ -151,6 +154,7 @@ resource "aws_s3_bucket_policy" "batch_data_destination_bucket_policy" {
151154 })
152155}
153156
157+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
154158resource "aws_s3_bucket_server_side_encryption_configuration" "s3_batch_destination_encryption" {
155159 bucket = aws_s3_bucket. batch_data_destination_bucket . id
156160
@@ -162,6 +166,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "s3_batch_destinat
162166 }
163167}
164168
169+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
165170resource "aws_s3_bucket_lifecycle_configuration" "data_destinations" {
166171 bucket = aws_s3_bucket. batch_data_destination_bucket . id
167172
@@ -192,13 +197,15 @@ resource "aws_s3_bucket_lifecycle_configuration" "data_destinations" {
192197 }
193198}
194199
# REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
resource "aws_s3_bucket" "batch_config_bucket" {
  # For now, only create in internal-dev and prod as we only have one shared Redis instance per account.
  count = local.create_config_bucket ? 1 : 0

  bucket = "imms-${local.unique_name}-supplier-config"
}
201207
208+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
202209resource "aws_s3_bucket_public_access_block" "batch_config_bucket_public_access_block" {
203210 count = local. create_config_bucket ? 1 : 0
204211
@@ -210,6 +217,7 @@ resource "aws_s3_bucket_public_access_block" "batch_config_bucket_public_access_
210217 restrict_public_buckets = true
211218}
212219
220+ # REQUIRES IMPORTING: Once deployed to green this needs to be imported to blue because resource exists after first run
213221resource "aws_s3_bucket_policy" "batch_config_bucket_policy" {
214222 count = local. create_config_bucket ? 1 : 0
215223
0 commit comments