
Commit 0527b89

hanwen-cluster authored and hanwen-pcluste committed
[integ-tests] Make resource_bucket_shared use the boto3 API in the same region as the bucket
This gets rid of API exceptions when operating on buckets in opt-in regions.

FYI: code in fixtures with scope <= class does not have to specify the region in boto3 calls; code in fixtures with scope > class does have to specify it.

Signed-off-by: Hanwen <[email protected]>
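A minimal sketch of the pattern this commit adopts (the region/bucket mapping and keys below are illustrative, not the fixture's actual values): create the boto3 S3 resource in the same region as the bucket being operated on, instead of relying on the default session region, which can raise API errors for buckets in opt-in regions.

import boto3

# Hypothetical region -> bucket mapping, mirroring the shape of s3_bucket_factory_shared.
buckets_by_region = {
    "us-east-1": "integ-tests-bucket-use1",
    "ap-southeast-3": "integ-tests-bucket-apse3",  # opt-in region
}

for region, bucket_name in buckets_by_region.items():
    # Pin the resource to the bucket's region; a resource created without
    # region_name uses the default session region and can fail against
    # buckets that live in opt-in regions.
    s3_resource = boto3.resource("s3", region_name=region)
    s3_resource.Bucket(bucket_name).upload_file("local/artifact.txt", "artifacts/artifact.txt")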
1 parent 5cad43f commit 0527b89

File tree

1 file changed: +4 -3 lines changed


tests/integration-tests/conftest_resource_bucket.py

Lines changed: 4 additions & 3 deletions
@@ -109,9 +109,10 @@ def resource_bucket_shared(request, s3_bucket_factory_shared, lambda_layer_sourc
 
     for region, s3_bucket in s3_bucket_factory_shared.items():
         logger.info(f"Uploading artifacts to: {s3_bucket}[{region}]")
+        s3_resource = boto3.resource("s3", region_name=region)
         for file, key in get_resource_map().items():
             logger.info(f" {root / file} -> {s3_bucket}/{key}")
-            boto3.resource("s3").Bucket(s3_bucket).upload_file(str(root / file), key)
+            s3_resource.Bucket(s3_bucket).upload_file(str(root / file), key)
 
         layer_key = (
             f"parallelcluster/{get_installed_parallelcluster_version()}/layers/aws-parallelcluster/lambda-layer.zip"
@@ -120,7 +121,7 @@ def resource_bucket_shared(request, s3_bucket_factory_shared, lambda_layer_sourc
             bucket, key = re.search(r"s3://([^/]*)/(.*)", lambda_layer_source).groups()
             source = {"Bucket": bucket, "Key": key}
             logger.info(f"Copying Lambda Layer from: s3://{bucket}/{key} -> s3://{s3_bucket}/{layer_key}")
-            boto3.resource("s3").Bucket(s3_bucket).copy(source, layer_key)
+            s3_resource.Bucket(s3_bucket).copy(source, layer_key)
         else:
             with tempfile.TemporaryDirectory() as basepath:
                 install_pc(basepath, get_installed_parallelcluster_version())
@@ -130,7 +131,7 @@ def resource_bucket_shared(request, s3_bucket_factory_shared, lambda_layer_sourc
                 zipfilename = Path(zipfile.name)
                 logger.info(f" {zipfilename} -> {s3_bucket}/{layer_key}")
                 shutil.make_archive(zipfilename.with_suffix(""), format="zip", root_dir=basepath)
-                boto3.resource("s3").Bucket(s3_bucket).upload_file(str(zipfilename), layer_key)
+                s3_resource.Bucket(s3_bucket).upload_file(str(zipfilename), layer_key)
 
     logger.info(s3_bucket_factory_shared)
     return s3_bucket_factory_shared
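The second hunk applies the same idea to the Lambda layer copy. A hedged, self-contained sketch of that step (the source URL, bucket names, and version in the key are made up for illustration; the fixture derives them from lambda_layer_source and the shared bucket map):

import re

import boto3

# Hypothetical inputs.
lambda_layer_source = "s3://source-artifacts-bucket/layers/lambda-layer.zip"
region, dest_bucket = "ap-southeast-3", "integ-tests-bucket-apse3"
layer_key = "parallelcluster/3.x.y/layers/aws-parallelcluster/lambda-layer.zip"

src_bucket, src_key = re.search(r"s3://([^/]*)/(.*)", lambda_layer_source).groups()

# Drive the S3-to-S3 copy with a resource pinned to the destination bucket's
# region, matching the change in this commit.
s3_resource = boto3.resource("s3", region_name=region)
s3_resource.Bucket(dest_bucket).copy({"Bucket": src_bucket, "Key": src_key}, layer_key)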
