Commit 52b67ed

Author: Andrei Neagu (committed)

rename

1 parent 1bbd2c1 commit 52b67ed

3 files changed (+7, -5 lines)


packages/aws-library/src/aws_library/s3/_constants.py

Lines changed: 3 additions & 1 deletion
@@ -9,7 +9,9 @@
 MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
     "100MiB"
 )
-STREAM_REDER_CHUNK: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("10MiB")
+STREAM_READER_CHUNK_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
+    "10MiB"
+)
 
 PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
 S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")
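
For context, both the old and new constants use pydantic's ByteSize, which validates a human-readable size string into an int subclass holding the byte count. A minimal standalone sketch of that pattern (not part of the commit, assuming pydantic v2):

    from typing import Final

    from pydantic import ByteSize, TypeAdapter

    # Same pattern as in _constants.py: "10MiB" validates to 10 * 1024**2 bytes.
    STREAM_READER_CHUNK_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
        "10MiB"
    )

    assert STREAM_READER_CHUNK_SIZE == 10 * 1024**2  # 10_485_760 bytes
    print(STREAM_READER_CHUNK_SIZE.human_readable())  # "10.0MiB"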

packages/aws-library/tests/test_s3_client.py

Lines changed: 2 additions & 2 deletions
@@ -28,7 +28,7 @@
 from aws_library.s3._client import _AWS_MAX_ITEMS_PER_PAGE, S3ObjectKey, SimcoreS3API
 from aws_library.s3._constants import (
     MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
-    STREAM_REDER_CHUNK,
+    STREAM_READER_CHUNK_SIZE,
 )
 from aws_library.s3._errors import (
     S3BucketInvalidError,
@@ -1902,7 +1902,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
             get_zip_bytes_iter(
                 archive_entries,
                 progress_bar=progress_bar,
-                chunk_size=STREAM_REDER_CHUNK,
+                chunk_size=STREAM_READER_CHUNK_SIZE,
             )
         ),
     )

services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@
 
 import orjson
 from aws_library.s3 import S3MetaData, SimcoreS3API
-from aws_library.s3._constants import STREAM_REDER_CHUNK
+from aws_library.s3._constants import STREAM_READER_CHUNK_SIZE
 from models_library.api_schemas_storage.storage_schemas import S3BucketName
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import (
@@ -173,7 +173,7 @@ async def create_and_upload_export(
             get_zip_bytes_iter(
                 archive_entries,
                 progress_bar=progress_bar,
-                chunk_size=STREAM_REDER_CHUNK,
+                chunk_size=STREAM_READER_CHUNK_SIZE,
             )
         ),
     )
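
Both call sites pass the renamed constant as the chunk_size of get_zip_bytes_iter, i.e., it sets how many bytes are read per step while the zip stream is assembled. As a rough illustration of the role such a constant plays, here is a hypothetical chunked reader (iter_file_chunks is illustrative only; it is not part of the commit or of the aws_library API):

    from collections.abc import Iterator
    from pathlib import Path

    # Hypothetical helper, not from the commit: yields a file's contents in
    # fixed-size blocks, the role STREAM_READER_CHUNK_SIZE plays for S3 reads.
    def iter_file_chunks(path: Path, chunk_size: int = 10 * 1024**2) -> Iterator[bytes]:
        with path.open("rb") as file:
            while chunk := file.read(chunk_size):
                yield chunk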
