Skip to content

Commit 6fb389d

Browse files
author
Andrei Neagu
committed
replaced with simpler implementation
1 parent a5c9060 commit 6fb389d

File tree

2 files changed

+10
-37
lines changed

2 files changed

+10
-37
lines changed

packages/aws-library/src/aws_library/s3/_client.py

Lines changed: 3 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
from servicelib.progress_bar import ProgressBarData
2121
from servicelib.utils import limited_gather
2222
from servicelib.zip_stream import DEFAULT_READ_CHUNK_SIZE, FileSize, FileStream
23+
from servicelib.zip_stream._file_like import FileLikeFileStreamReader
2324
from servicelib.zip_stream._types import StreamData
2425
from settings_library.s3 import S3Settings
2526
from types_aiobotocore_s3 import S3Client
@@ -516,42 +517,9 @@ async def upload_object_from_file_stream(
516517
self,
517518
bucket_name: S3BucketName,
518519
object_key: S3ObjectKey,
519-
file_stream: FileStream,
520+
file_like_reader: FileLikeFileStreamReader,
520521
) -> None:
521-
# Create a multipart upload
522-
multipart_response = await self._client.create_multipart_upload(
523-
Bucket=bucket_name, Key=object_key
524-
)
525-
upload_id = multipart_response["UploadId"]
526-
527-
try:
528-
parts = []
529-
part_number = 1
530-
531-
async for chunk in file_stream:
532-
part_response = await self._client.upload_part(
533-
Bucket=bucket_name,
534-
Key=object_key,
535-
PartNumber=part_number,
536-
UploadId=upload_id,
537-
Body=chunk,
538-
)
539-
parts.append({"ETag": part_response["ETag"], "PartNumber": part_number})
540-
part_number += 1
541-
542-
# Complete the multipart upload
543-
await self._client.complete_multipart_upload(
544-
Bucket=bucket_name,
545-
Key=object_key,
546-
UploadId=upload_id,
547-
MultipartUpload={"Parts": parts}, # type: ignore[typeddict-item]
548-
)
549-
except Exception:
550-
# Abort the multipart upload if something goes wrong
551-
await self._client.abort_multipart_upload(
552-
Bucket=bucket_name, Key=object_key, UploadId=upload_id
553-
)
554-
raise
522+
await self._client.upload_fileobj(file_like_reader, bucket_name, object_key) # type: ignore[arg-type]
555523

556524
@staticmethod
557525
def is_multipart(file_size: ByteSize) -> bool:

packages/aws-library/tests/test_s3_client.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@
6464
DiskStreamReader,
6565
get_zip_archive_stream,
6666
)
67+
from servicelib.zip_stream._file_like import FileLikeFileStreamReader
6768
from settings_library.s3 import S3Settings
6869
from types_aiobotocore_s3 import S3Client
6970
from types_aiobotocore_s3.literals import BucketLocationConstraintType
@@ -1436,8 +1437,9 @@ async def test_upload_object_from_file_stream(
14361437
_, file_stream = await simcore_s3_api.get_object_file_stream(
14371438
with_s3_bucket, with_uploaded_file_on_s3.s3_key
14381439
)
1440+
14391441
await simcore_s3_api.upload_object_from_file_stream(
1440-
with_s3_bucket, object_key, file_stream(AsyncMock())
1442+
with_s3_bucket, object_key, FileLikeFileStreamReader(file_stream(AsyncMock()))
14411443
)
14421444

14431445
await simcore_s3_api.delete_object(bucket=with_s3_bucket, object_key=object_key)
@@ -1587,8 +1589,11 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
15871589
await simcore_s3_api.upload_object_from_file_stream(
15881590
with_s3_bucket,
15891591
archive_s3_object_key,
1590-
get_zip_archive_stream(archive_file_entries, progress_bar=progress_bar),
1592+
FileLikeFileStreamReader(
1593+
get_zip_archive_stream(archive_file_entries, progress_bar=progress_bar)
1594+
),
15911595
)
1596+
15921597
duration = time.time() - started
15931598
print(f"Zip created on S3 in {duration:.2f} seconds")
15941599

0 commit comments

Comments
 (0)