
Commit 33aca4f

Author: Andrei Neagu
Message: fixed zip, using zip_64 by default
Parent: d91899e
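For context (an added note, not part of the commit message): the classic ZIP_32 format caps individual members and the archive as a whole at 4 GiB and allows at most 65,535 entries; ZIP_64 lifts both limits, which is presumably what makes large S3 exports work after this change. The diffs below also stop reusing the 100MiB MULTIPART_COPY_THRESHOLD as a read-chunk size, introducing a dedicated 10MiB STREAM_REDER_CHUNK constant instead.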

File tree: 4 files changed (+11 / -6 lines)

packages/aws-library/src/aws_library/s3/_constants.py
Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@
 MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
     "100MiB"
 )
+STREAM_REDER_CHUNK: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("10MiB")
 
 PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
 S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")
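As an aside, a minimal sketch (not from this repository) of how these pydantic ByteSize constants behave: the human-readable string is validated into a plain byte count that still offers a readable rendering.

# Sketch: pydantic v2 ByteSize parsing, as used for STREAM_REDER_CHUNK above.
from pydantic import ByteSize, TypeAdapter

chunk: ByteSize = TypeAdapter(ByteSize).validate_python("10MiB")
print(int(chunk))              # 10485760 -- ByteSize subclasses int
print(chunk.human_readable())  # "10.0MiB"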

packages/aws-library/tests/test_s3_client.py
Lines changed: 2 additions & 2 deletions

@@ -27,8 +27,8 @@
 from aiohttp import ClientSession
 from aws_library.s3._client import _AWS_MAX_ITEMS_PER_PAGE, S3ObjectKey, SimcoreS3API
 from aws_library.s3._constants import (
-    MULTIPART_COPY_THRESHOLD,
     MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
+    STREAM_REDER_CHUNK,
 )
 from aws_library.s3._errors import (
     S3BucketInvalidError,
@@ -1902,7 +1902,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
             get_zip_bytes_iter(
                 archive_entries,
                 progress_bar=progress_bar,
-                chunk_size=MULTIPART_COPY_THRESHOLD,
+                chunk_size=STREAM_REDER_CHUNK,
             )
         ),
     )

packages/service-library/src/servicelib/bytes_iters/_stream_zip.py
Lines changed: 6 additions & 2 deletions

@@ -1,14 +1,17 @@
+import logging
 from collections.abc import AsyncIterable
 from datetime import UTC, datetime
 from stat import S_IFREG
 from typing import TypeAlias
 
 from models_library.bytes_iters import BytesIter, DataSize
-from stream_zip import ZIP_32, AsyncMemberFile, async_stream_zip
+from stream_zip import ZIP_64, AsyncMemberFile, async_stream_zip
 
 from ..progress_bar import ProgressBarData
 from ._models import BytesStreamer
 
+_logger = logging.getLogger(__name__)
+
 FileNameInArchive: TypeAlias = str
 ArchiveFileEntry: TypeAlias = tuple[FileNameInArchive, BytesStreamer]
 ArchiveEntries: TypeAlias = list[ArchiveFileEntry]
@@ -22,7 +25,7 @@ async def _member_files_iter(
         file_name,
         datetime.now(UTC),
         S_IFREG | 0o600,
-        ZIP_32,
+        ZIP_64,
         byte_streamer.with_progress_bytes_iter(progress_bar=progress_bar),
     )
 
@@ -42,6 +45,7 @@ async def get_zip_bytes_iter(
         sum(bytes_streamer.data_size for _, bytes_streamer in archive_entries)
     )
     description = f"files: count={len(archive_entries)}, size={total_stream_lenth.human_readable()}"
+    _logger.info("Archiving %s", description)
 
     async with progress_bar.sub_progress(
         steps=total_stream_lenth, description=description, progress_unit="Byte"
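To make the ZIP_64 switch concrete, here is a minimal, self-contained sketch of stream_zip's async API using the same 5-tuple member shape that _member_files_iter now yields (the file name and payload are hypothetical, not from this commit):

# Sketch: streaming a ZIP_64 archive with stream_zip's async API.
import asyncio
from datetime import UTC, datetime
from stat import S_IFREG

from stream_zip import ZIP_64, async_stream_zip


async def file_content():
    # Hypothetical payload; the real code streams S3 objects and local files.
    yield b"hello world"


async def member_files():
    # Same (name, mtime, mode, method, content) shape as in _member_files_iter.
    yield ("hello.txt", datetime.now(UTC), S_IFREG | 0o600, ZIP_64, file_content())


async def main() -> None:
    async for chunk in async_stream_zip(member_files()):
        ...  # forward each zipped chunk to S3, an HTTP response, etc.


asyncio.run(main())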

services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py
Lines changed: 2 additions & 2 deletions

@@ -4,7 +4,7 @@
 
 import orjson
 from aws_library.s3 import S3MetaData, SimcoreS3API
-from aws_library.s3._constants import MULTIPART_COPY_THRESHOLD
+from aws_library.s3._constants import STREAM_REDER_CHUNK
 from models_library.api_schemas_storage.storage_schemas import S3BucketName
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import (
@@ -173,7 +173,7 @@ async def create_and_upload_export(
             get_zip_bytes_iter(
                 archive_entries,
                 progress_bar=progress_bar,
-                chunk_size=MULTIPART_COPY_THRESHOLD,
+                chunk_size=STREAM_REDER_CHUNK,
             )
         ),
     )
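A note on the chunk-size change in both call sites (my reading, not stated in the commit): MULTIPART_COPY_THRESHOLD (100MiB) marks where S3 copies switch to multipart uploads, so reusing it as the streaming read-chunk size conflated two unrelated knobs and buffered 100MiB at a time; the dedicated 10MiB STREAM_REDER_CHUNK keeps per-chunk memory an order of magnitude lower while staying large enough to amortize per-request overhead.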
