Skip to content

Commit 973423e

Browse files
author
Andrei Neagu
committed
refactor
1 parent 357273a commit 973423e

File tree

2 files changed

+27
-13
lines changed

2 files changed

+27
-13
lines changed

packages/aws-library/tests/test_s3_client.py

Lines changed: 19 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
import json
1212
import logging
1313
import random
14+
import time
1415
from collections import defaultdict
1516
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
1617
from dataclasses import dataclass
@@ -1446,11 +1447,10 @@ async def test_upload_object_from_file_stream(
14461447
@pytest.fixture
14471448
def files_stored_locally(
14481449
create_file_of_size: Callable[[ByteSize], Path],
1450+
file_size: ByteSize,
1451+
local_count: int,
14491452
) -> Iterator[set[Path]]:
1450-
files = {
1451-
create_file_of_size(TypeAdapter(ByteSize).validate_python("10Mib"))
1452-
for _ in range(10)
1453-
}
1453+
files = {create_file_of_size(file_size) for _ in range(local_count)}
14541454

14551455
yield files
14561456

@@ -1461,13 +1461,12 @@ def files_stored_locally(
14611461
@pytest.fixture
14621462
async def files_stored_in_s3(
14631463
create_file_of_size: Callable[[ByteSize], Path],
1464+
file_size: ByteSize,
1465+
remote_count: int,
14641466
s3_client: S3Client,
14651467
with_s3_bucket: S3BucketName,
14661468
) -> AsyncIterator[set[Path]]:
1467-
files = {
1468-
create_file_of_size(TypeAdapter(ByteSize).validate_python("10Mib"))
1469-
for _ in range(10)
1470-
}
1469+
files = {create_file_of_size(file_size) for _ in range(remote_count)}
14711470
for file in files:
14721471
await s3_client.upload_file(
14731472
Filename=f"{file}",
@@ -1524,6 +1523,14 @@ async def archive_s3_object_key(
15241523
await simcore_s3_api.delete_object(bucket=with_s3_bucket, object_key=s3_object_key)
15251524

15261525

1526+
@pytest.mark.parametrize(
1527+
"file_size, local_count, remote_count",
1528+
[
1529+
pytest.param(
1530+
TypeAdapter(ByteSize).validate_python("10Mib"), 10, 10, id="small"
1531+
),
1532+
],
1533+
)
15271534
async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_then_upload_to_s3(
15281535
mocked_s3_server_envs: EnvVarsDict,
15291536
files_stored_locally: set[Path],
@@ -1554,7 +1561,6 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
15541561
)
15551562

15561563
for s3_object_key in _get_s3_object_keys(files_stored_in_s3):
1557-
print(f"will upload {s3_object_key=}")
15581564
archive_file_entries.append(
15591565
(
15601566
s3_object_key,
@@ -1573,6 +1579,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
15731579
progress_report_cb=mocked_progress_bar_cb,
15741580
description="root_bar",
15751581
) as root:
1582+
started = time.time()
15761583
await simcore_s3_api.upload_object_from_file_stream(
15771584
with_s3_bucket,
15781585
archive_s3_object_key,
@@ -1582,9 +1589,11 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
15821589
chunk_size=MIN_MULTIPART_UPLOAD_CHUNK_SIZE,
15831590
),
15841591
)
1592+
duration = time.time() - started
1593+
print(f"Zip created on S3 in {duration:.2f} seconds")
15851594

15861595
# 2. download zip archive from S3
1587-
1596+
print(f"downloading {archive_download_path}")
15881597
await s3_client.download_file(
15891598
with_s3_bucket, archive_s3_object_key, f"{archive_download_path}"
15901599
)
@@ -1593,7 +1602,6 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
15931602
await unarchive_dir(archive_download_path, extracted_archive_path)
15941603

15951604
# 4. compare
1596-
15971605
print("comparing files")
15981606
all_files_in_zip = get_files_info_from_itrable(
15991607
files_stored_locally

packages/pytest-simcore/src/pytest_simcore/helpers/comparing.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
import aiofiles
66
from servicelib.file_utils import create_sha256_checksum
7+
from servicelib.utils import limited_gather
78

89
_FilesInfo: TypeAlias = dict[str, Path]
910

@@ -30,5 +31,10 @@ def get_files_info_from_itrable(items: Iterable[Path]) -> _FilesInfo:
3031
async def assert_same_contents(file_info1: _FilesInfo, file_info2: _FilesInfo) -> None:
3132
assert set(file_info1.keys()) == set(file_info2.keys())
3233

33-
for file_name in file_info1:
34-
await assert_same_file_content(file_info1[file_name], file_info2[file_name])
34+
await limited_gather(
35+
*(
36+
assert_same_file_content(file_info1[file_name], file_info2[file_name])
37+
for file_name in file_info1
38+
),
39+
limit=10,
40+
)

0 commit comments

Comments
 (0)