
Commit 56d632d

Expose configuration needed for testing profile authorization (#344)
1 parent ea101e9 commit 56d632d
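
In short, the commit threads a "profile" IAM role and a profile-only bucket through CI so the tests can check that the bucket is reachable only when a client is constructed with that profile. A minimal sketch of the negative case, using the client classes that appear in the diff below (the region and bucket values are illustrative placeholders, not values from the commit):

import pytest
from s3torchconnectorclient import S3Exception
from s3torchconnector._s3client import S3Client

region = "us-west-2"                    # placeholder
profile_bucket = "profile-test-bucket"  # placeholder; accessible only through the profile role

# Without assuming the profile role, writes to the profile bucket are expected to fail.
client = S3Client(region)
with pytest.raises(S3Exception):
    put_stream = client.put_object(profile_bucket, "prefix/hello_world.txt")
    put_stream.write(b"Hello, World!\n")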

8 files changed: +158 −13 lines changed

.github/workflows/python-integration.yml

Lines changed: 8 additions & 0 deletions
@@ -32,11 +32,15 @@ jobs:
             region: ${{ vars.S3_REGION }}
             storage-class: ""
             endpoint-url: ${{ vars.S3_CUSTOM_ENDPOINT_URL }}
+            profile-role: ${{ vars.PROFILE_IAM_ROLE }}
+            profile-bucket: ${{ vars.S3_PROFILE_BUCKET }}
           - name: "S3 Express"
             bucket: ${{ vars.S3_EXPRESS_BUCKET }}
             region: ${{ vars.S3_EXPRESS_REGION }}
             storage-class: "EXPRESS_ONEZONE"
             endpoint-url: ""
+            profile-role: ${{ vars.PROFILE_IAM_ROLE }}
+            profile-bucket: ${{ vars.S3_EXPRESS_PROFILE_BUCKET }}
     permissions:
       id-token: write
       contents: read

@@ -88,6 +92,8 @@ jobs:
           CI_REGION=${{ matrix.test-run.region }} \
           CI_BUCKET=${{ matrix.test-run.bucket }} \
           CI_STORAGE_CLASS=${{ matrix.test-run.storage-class }} \
+          CI_PROFILE_ROLE=${{ matrix.test-run.profile-role }} \
+          CI_PROFILE_BUCKET=${{ matrix.test-run.profile-bucket }} \
           pytest s3torchconnector/tst/e2e --ignore-glob '*/**/test_e2e_s3_lightning_checkpoint.py' --ignore-glob '*/**/dcp' --ignore-glob '*/**/test_distributed_training.py' -n auto

       - name: s3torchconnector ${{ matrix.test-run.name }} distributed training integration tests

@@ -125,6 +131,8 @@ jobs:
           CI_BUCKET=${{ matrix.test-run.bucket }} \
           CI_STORAGE_CLASS=${{ matrix.test-run.storage-class }} \
           CI_CUSTOM_ENDPOINT_URL=${{ matrix.test-run.endpoint-url }} \
+          CI_PROFILE_ROLE=${{ matrix.test-run.profile-role }} \
+          CI_PROFILE_BUCKET=${{ matrix.test-run.profile-bucket }} \
           pytest s3torchconnectorclient/python/tst/integration -n auto

       - name: Save Cargo cache

.github/workflows/wheels.yml

Lines changed: 3 additions & 0 deletions
@@ -12,6 +12,9 @@ env:
   S3_PREFIX: ${{ vars.S3_PREFIX }}
   S3_EXPRESS_BUCKET: ${{ vars.S3_EXPRESS_BUCKET }}
   S3_EXPRESS_REGION: ${{ vars.S3_EXPRESS_REGION }}
+  PROFILE_IAM_ROLE: ${{vars.PROFILE_IAM_ROLE}}
+  S3_PROFILE_BUCKET: ${{vars.S3_PROFILE_BUCKET}}
+  S3_EXPRESS_PROFILE_BUCKET: ${{vars.S3_EXPRESS_PROFILE_BUCKET}}

 jobs:
   generate_third_party_licenses:

run_cibuildwheel_on_ec2.sh

Lines changed: 9 additions & 3 deletions
@@ -1,6 +1,6 @@
-if [ $# -ne 7 ]; then
-  echo "Invalid number of parameters, you need to provide role name, region name, bucket name, prefix, express region name and express bucket name, custom endpoint for s3 standard"
-  echo "Usage: $0 S3RoleName us-west-2 s3torchconnector-test-bucket-name prefix-name/ us-east-1 s3torchconnectorclient-express-bucket-name https://s3.amazon.com"
+if [ $# -ne 10 ]; then
+  echo "Invalid number of parameters, you need to provide role name, region name, bucket name, prefix, express region name and express bucket name, custom endpoint for s3 standard, auth profile arn and buckets names for testing auth profile"
+  echo "Usage: $0 S3RoleName us-west-2 s3torchconnector-test-bucket-name prefix-name/ us-east-1 s3torchconnectorclient-express-bucket-name https://s3.amazon.com arn:aws:iam::XXXXXXXXXXX:role/RoleName profile-test-bucket-name profile-test-express-bucket-name "
   exit 1
 fi

@@ -11,6 +11,9 @@ PREFIX=$4
 EXPRESS_REGION_NAME=$5
 EXPRESS_BUCKET_NAME=$6
 S3_CUSTOM_ENDPOINT_URL=$7
+PROFILE_IAM_ROLE=$8
+S3_PROFILE_BUCKET=$9
+S3_EXPRESS_PROFILE_BUCKET=${10}

 FILE_NAME="tmp_cred.json"
 # Set metadata token TTL to 6 hours

@@ -30,5 +33,8 @@ export S3_PREFIX=${PREFIX}
 export S3_EXPRESS_REGION=${EXPRESS_REGION_NAME}
 export S3_EXPRESS_BUCKET=${EXPRESS_BUCKET_NAME}
 export S3_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL}
+export PROFILE_IAM_ROLE=${PROFILE_IAM_ROLE}
+export S3_PROFILE_BUCKET=${S3_PROFILE_BUCKET}
+export S3_EXPRESS_PROFILE_BUCKET=${S3_EXPRESS_PROFILE_BUCKET}

 CIBW_MANYLINUX_X86_64_IMAGE=manylinux2014 CIBW_MANYLINUX_AARCH64_IMAGE=manylinux2014 cibuildwheel --output-dir wheelhouse --platform linux s3torchconnectorclient

s3torchconnector/tst/e2e/conftest.py

Lines changed: 38 additions & 4 deletions
@@ -27,6 +27,8 @@ class BucketPrefixData(object):
     prefix: str
     storage_class: str = None
     contents: dict
+    profile_arn: str = None
+    profile_bucket: str = None

     def __init__(
         self,

@@ -35,12 +37,16 @@ def __init__(
         prefix: str,
         storage_class: str = None,
         contents: dict = None,
+        profile_arn: str = None,
+        profile_bucket: str = None,
     ):
         self.bucket = bucket
         self.prefix = prefix
         self.region = region
         self.storage_class = storage_class
         self.contents = contents or {}
+        self.profile_arn = profile_arn
+        self.profile_bucket = profile_bucket

     @property
     def s3_uri(self):

@@ -61,9 +67,22 @@ class BucketPrefixFixture(BucketPrefixData):
     to this instance, so other concurrent tests won't affect its state."""

     def __init__(
-        self, region: str, bucket: str, prefix: str, storage_class: str = None
+        self,
+        region: str,
+        bucket: str,
+        prefix: str,
+        storage_class: str = None,
+        profile_arn: str = None,
+        profile_bucket: str = None,
     ):
-        super().__init__(region, bucket, prefix, storage_class)
+        super().__init__(
+            region,
+            bucket,
+            prefix,
+            storage_class,
+            profile_arn=profile_arn,
+            profile_bucket=profile_bucket,
+        )
         session = boto3.Session(region_name=region)
         self.s3 = session.client("s3")

@@ -80,7 +99,13 @@ def get_data_snapshot(self):
         Useful when passing data to another process without serializing s3 client
         """
         return BucketPrefixData(
-            self.region, self.bucket, self.prefix, self.storage_class, self.contents
+            self.region,
+            self.bucket,
+            self.prefix,
+            self.storage_class,
+            self.contents,
+            self.profile_arn,
+            self.profile_bucket,
         )

@@ -90,12 +115,16 @@ def get_test_bucket_prefix(name: str) -> BucketPrefixFixture:
     prefix = getenv("CI_PREFIX")
     region = getenv("CI_REGION")
     storage_class = getenv("CI_STORAGE_CLASS", optional=True)
+    profile_arn = getenv("CI_PROFILE_ROLE", optional=True)
+    profile_bucket = getenv("CI_PROFILE_BUCKET", optional=True)
     assert prefix == "" or prefix.endswith("/")

     nonce = random.randrange(2**64)
     prefix = f"{prefix}{name}/{nonce}/"

-    return BucketPrefixFixture(region, bucket, prefix, storage_class)
+    return BucketPrefixFixture(
+        region, bucket, prefix, storage_class, profile_arn, profile_bucket
+    )


 @pytest.fixture

@@ -139,3 +168,8 @@ def _create_image_directory_fixture(num_image: int, image_size: int, node_name:
 @pytest.fixture
 def checkpoint_directory(request) -> BucketPrefixFixture:
     return get_test_bucket_prefix(f"{request.node.name}/checkpoint_directory")
+
+
+@pytest.fixture
+def empty_directory(request) -> BucketPrefixFixture:
+    return get_test_bucket_prefix(f"{request.node.name}/empty_directory")

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# // SPDX-License-Identifier: BSD
+
+import pytest
+from s3torchconnectorclient import S3Exception
+
+from s3torchconnector._s3client import S3Client
+
+HELLO_WORLD_DATA = b"Hello, World!\n"
+
+
+def test_no_access_objects_without_profile(empty_directory):
+    if empty_directory.profile_bucket is None:
+        pytest.skip("No profile bucket configured")
+
+    client = S3Client(
+        empty_directory.region,
+    )
+    filename = f"{empty_directory.prefix}hello_world.txt"
+
+    with pytest.raises(S3Exception):
+        put_stream = client.put_object(
+            empty_directory.profile_bucket,
+            filename,
+        )
+        put_stream.write(HELLO_WORLD_DATA)

s3torchconnectorclient/pyproject.toml

Lines changed: 9 additions & 6 deletions
@@ -32,7 +32,7 @@ test = [
     "flake8",
     "black",
     "mypy",
-    "Pillow"
+    "Pillow<=11.2.1" # installation of the newer versions fails in manylinux2014 images
 ]

 [tool.setuptools.packages]

@@ -51,10 +51,10 @@ test-extras = "test"
 test-command = [
     "pytest {package}/python/tst/unit",
     "pytest {package}/../s3torchconnector/tst/unit --ignore {package}/../s3torchconnector/tst/unit/lightning --ignore {package}/../s3torchconnector/tst/unit/dcp",
-    "CI_STORAGE_CLASS='' CI_REGION=${S3_REGION} CI_BUCKET=${S3_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL} pytest {package}/python/tst/integration",
-    "CI_STORAGE_CLASS='' CI_REGION=${S3_REGION} CI_BUCKET=${S3_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL} pytest {package}/../s3torchconnector/tst/e2e --ignore {package}/../s3torchconnector/tst/e2e/test_e2e_s3_lightning_checkpoint.py --ignore {package}/../s3torchconnector/tst/e2e/dcp",
-    "CI_STORAGE_CLASS=EXPRESS_ONEZONE CI_REGION=${S3_EXPRESS_REGION} CI_BUCKET=${S3_EXPRESS_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL='' pytest {package}/python/tst/integration",
-    "CI_STORAGE_CLASS=EXPRESS_ONEZONE CI_REGION=${S3_EXPRESS_REGION} CI_BUCKET=${S3_EXPRESS_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL='' pytest {package}/../s3torchconnector/tst/e2e --ignore {package}/../s3torchconnector/tst/e2e/test_e2e_s3_lightning_checkpoint.py --ignore {package}/../s3torchconnector/tst/e2e/dcp",
+    "CI_STORAGE_CLASS='' CI_REGION=${S3_REGION} CI_BUCKET=${S3_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL} CI_PROFILE_ROLE=${PROFILE_IAM_ROLE} CI_PROFILE_BUCKET=${S3_PROFILE_BUCKET} pytest {package}/python/tst/integration",
+    "CI_STORAGE_CLASS='' CI_REGION=${S3_REGION} CI_BUCKET=${S3_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL} CI_PROFILE_ROLE=${PROFILE_IAM_ROLE} CI_PROFILE_BUCKET=${S3_PROFILE_BUCKET} pytest {package}/../s3torchconnector/tst/e2e --ignore {package}/../s3torchconnector/tst/e2e/test_e2e_s3_lightning_checkpoint.py --ignore {package}/../s3torchconnector/tst/e2e/dcp",
+    "CI_STORAGE_CLASS=EXPRESS_ONEZONE CI_REGION=${S3_EXPRESS_REGION} CI_BUCKET=${S3_EXPRESS_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL='' CI_PROFILE_ROLE=${PROFILE_IAM_ROLE} CI_PROFILE_BUCKET=${S3_EXPRESS_PROFILE_BUCKET} pytest {package}/python/tst/integration",
+    "CI_STORAGE_CLASS=EXPRESS_ONEZONE CI_REGION=${S3_EXPRESS_REGION} CI_BUCKET=${S3_EXPRESS_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL='' CI_PROFILE_ROLE=${PROFILE_IAM_ROLE} CI_PROFILE_BUCKET=${S3_EXPRESS_PROFILE_BUCKET} pytest {package}/../s3torchconnector/tst/e2e --ignore {package}/../s3torchconnector/tst/e2e/test_e2e_s3_lightning_checkpoint.py --ignore {package}/../s3torchconnector/tst/e2e/dcp",
     "python -m pip install -e '{package}/../s3torchconnector[lightning-tests]'",
     "pytest {package}/../s3torchconnector/tst/unit/lightning",
     "CI_STORAGE_CLASS='' CI_REGION=${S3_REGION} CI_BUCKET=${S3_BUCKET} CI_PREFIX=${S3_PREFIX} CI_CUSTOM_ENDPOINT_URL=${S3_CUSTOM_ENDPOINT_URL} pytest {package}/../s3torchconnector/tst/e2e/test_e2e_s3_lightning_checkpoint.py",

@@ -69,7 +69,10 @@ environment-pass = [
     "S3_PREFIX",
     "S3_EXPRESS_BUCKET",
     "S3_EXPRESS_REGION",
-    "S3_CUSTOM_ENDPOINT_URL"
+    "S3_CUSTOM_ENDPOINT_URL",
+    "PROFILE_IAM_ROLE",
+    "S3_PROFILE_BUCKET",
+    "S3_EXPRESS_PROFILE_BUCKET"
 ]
 before-build = "cp README.md s3torchconnectorclient; cp LICENSE s3torchconnectorclient/; cp THIRD-PARTY-LICENSES s3torchconnectorclient/; cp NOTICE s3torchconnectorclient/"
 build = ["cp38*", "cp39*", "cp310*", "cp311*", "cp312*"]

s3torchconnectorclient/python/tst/integration/conftest.py

Lines changed: 2 additions & 0 deletions
@@ -33,6 +33,8 @@ class BucketPrefixFixture:
     storage_class: Optional[str] = getenv("CI_STORAGE_CLASS", optional=True)
     endpoint_url: Optional[str] = getenv("CI_CUSTOM_ENDPOINT_URL", optional=True)
     contents: dict = field(default_factory=dict)
+    profile_arn: Optional[str] = getenv("CI_PROFILE_ROLE", optional=True)
+    profile_bucket: Optional[str] = getenv("CI_PROFILE_BUCKET", optional=True)

     def __post_init__(self):
         assert self.prefix == "" or self.prefix.endswith("/")

s3torchconnectorclient/python/tst/integration/test_mountpoint_s3_integration.py

Lines changed: 63 additions & 0 deletions
@@ -6,6 +6,7 @@
 import os
 import pickle
 import sys
+import tempfile
 import uuid
 import random
 import pytest

@@ -31,6 +32,7 @@

 HELLO_WORLD_DATA = b"Hello, World!\n"
 TEST_USER_AGENT_PREFIX = "integration-tests"
+TEST_PROFILE_NAME = "test-profile"


 @pytest.mark.parametrize("force_path_style", [False, True])

@@ -489,6 +491,67 @@ def test_copy_object_raises_when_source_key_does_not_exist(
     )


+def test_no_access_objects_without_profile(sample_directory: BucketPrefixFixture):
+    if sample_directory.profile_bucket is None:
+        pytest.skip("No profile bucket configured")
+
+    client = MountpointS3Client(
+        sample_directory.region,
+        TEST_USER_AGENT_PREFIX,
+    )
+    filename = f"{sample_directory.prefix}hello_world.txt"
+
+    with pytest.raises(S3Exception):
+        put_stream = client.put_object(
+            sample_directory.profile_bucket,
+            filename,
+        )
+        put_stream.write(HELLO_WORLD_DATA)
+
+
+def test_access_objects_with_profile(sample_directory: BucketPrefixFixture):
+    if sample_directory.profile_bucket is None:
+        pytest.skip("No profile bucket configured")
+
+    try:
+        tmp_file = tempfile.NamedTemporaryFile()
+        tmp_file.write(
+            f"""[profile default]
+aws_access_key_id = {os.getenv("AWS_ACCESS_KEY_ID")}
+aws_secret_access_key = {os.getenv("AWS_SECRET_ACCESS_KEY")}
+aws_session_token = {os.getenv("AWS_SESSION_TOKEN")}
+
+[profile {TEST_PROFILE_NAME}]
+role_arn = {sample_directory.profile_arn}
+region = {sample_directory.region}
+source_profile = default""".encode()
+        )
+        tmp_file.flush()
+        os.environ["AWS_CONFIG_FILE"] = tmp_file.name
+
+        client = MountpointS3Client(
+            sample_directory.region,
+            TEST_USER_AGENT_PREFIX,
+            profile=TEST_PROFILE_NAME,
+        )
+        filename = f"{sample_directory.prefix}hello_world.txt"
+        put_stream = client.put_object(
+            sample_directory.profile_bucket,
+            filename,
+        )
+
+        put_stream.write(HELLO_WORLD_DATA)
+        put_stream.close()
+
+        get_stream = client.get_object(
+            sample_directory.profile_bucket,
+            filename,
+        )
+        assert b"".join(get_stream) == HELLO_WORLD_DATA
+    finally:
+        os.environ["AWS_CONFIG_FILE"] = ""
+
+
 def _parse_list_result(stream: ListObjectStream, max_keys: int):
     object_infos = []
     i = 0
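
To try the new profile tests outside CI, one option is to export the CI_* variables that the fixtures above read and select only the profile tests; a rough sketch under that assumption (all values are placeholders for your own AWS resources):

import os
import pytest

# Variable names come from the diff above; the values are illustrative placeholders.
os.environ.update({
    "CI_REGION": "us-west-2",
    "CI_BUCKET": "my-test-bucket",
    "CI_PREFIX": "prefix/",
    "CI_STORAGE_CLASS": "",
    "CI_PROFILE_ROLE": "arn:aws:iam::123456789012:role/ProfileTestRole",
    "CI_PROFILE_BUCKET": "my-profile-test-bucket",
})

# Run only the integration tests whose names mention "profile".
pytest.main(["s3torchconnectorclient/python/tst/integration", "-k", "profile"])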
