Commit 5183f2d

🐛⚗️RClone: Disable multi-threading 🚨 (#5790)
1 parent 1449d44 commit 5183f2d

2 files changed: +15 −17 lines

packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py

Lines changed: 5 additions & 6 deletions

@@ -172,6 +172,9 @@ async def _get_folder_size(
     return rclone_folder_size_result.bytes
 
 
+_DISABLE_RCLONE_MULTI_THREADED: Final[int] = 1
+
+
 async def _sync_sources(
     r_clone_settings: RCloneSettings,
     progress_bar: ProgressBarData,
@@ -209,22 +212,18 @@ async def _sync_sources(
         "--buffer-size",  # docs https://rclone.org/docs/#buffer-size-size
         r_clone_settings.R_CLONE_OPTION_BUFFER_SIZE,
         "--use-json-log",
-        # make sure stats can be noticed at the end
-        # "--stats-log-level",
-        # "INFO",
         # frequent polling for faster progress updates
         "--stats",
         "200ms",
-        # makes sure the stats are only sending the summary
-        # "--stats-one-line",
-        # "--progress",
         "--verbose",
         "sync",
         shlex.quote(source),
         shlex.quote(destination),
         # filter options
         *_get_exclude_filters(exclude_patterns),
         "--links",
+        "--multi-thread-streams",
+        f"{_DISABLE_RCLONE_MULTI_THREADED}",
     )
 
     async with progress_bar.sub_progress(
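
For context, a minimal sketch (not the project's code) of how the new flag ends up on the rclone command line; as the constant's name indicates, passing 1 to rclone's --multi-thread-streams option is meant to keep transfers single-threaded. The source and destination values below are placeholders.

# Hypothetical, minimal sketch of the resulting command line; "source" and
# "destination" are placeholder values, not what _sync_sources actually uses.
import shlex

_DISABLE_RCLONE_MULTI_THREADED = 1  # 1 stream, i.e. no multi-threaded transfers

source = "/tmp/some-local-folder"       # placeholder
destination = "remote-s3:some-bucket"   # placeholder

command = (
    "rclone",
    "--use-json-log",
    "--stats",
    "200ms",
    "--verbose",
    "sync",
    shlex.quote(source),
    shlex.quote(destination),
    "--links",
    "--multi-thread-streams",
    f"{_DISABLE_RCLONE_MULTI_THREADED}",
)

# Prints the assembled command; running it would require rclone on PATH and a
# configured remote, e.g. via asyncio.create_subprocess_exec(*command).
print(" ".join(command))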

packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py

Lines changed: 10 additions & 11 deletions

@@ -16,6 +16,7 @@
 import aiofiles
 import pytest
 from faker import Faker
+from models_library.progress_bar import ProgressReport
 from pydantic import AnyUrl, ByteSize, parse_obj_as
 from servicelib.file_utils import remove_directory
 from servicelib.progress_bar import ProgressBarData
@@ -50,9 +51,7 @@ async def cleanup_bucket_after_test(r_clone_settings: RCloneSettings) -> None:
         endpoint_url=r_clone_settings.R_CLONE_S3.S3_ENDPOINT,
     ) as s_3:
         bucket = await s_3.Bucket(r_clone_settings.R_CLONE_S3.S3_BUCKET_NAME)
-        s3_objects = []
-        async for s3_object in bucket.objects.all():
-            s3_objects.append(s3_object)  # noqa: PERF402
+        s3_objects = [_ async for _ in bucket.objects.all()]
         await asyncio.gather(*[o.delete() for o in s3_objects])
 
 
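The cleanup change above replaces an explicit async for loop plus append with an async list comprehension; both collect the same items. A self-contained illustration, with a toy async generator standing in for bucket.objects.all():

import asyncio


async def _fake_objects():  # hypothetical stand-in for bucket.objects.all()
    for name in ("a.txt", "b.txt", "c.txt"):
        yield name


async def main() -> None:
    # old style: explicit loop + append
    old_style = []
    async for obj in _fake_objects():
        old_style.append(obj)

    # new style: async list comprehension, as in the diff above
    new_style = [_ async for _ in _fake_objects()]

    assert old_style == new_style == ["a.txt", "b.txt", "c.txt"]


asyncio.run(main())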

@@ -82,7 +81,7 @@ async def _create_random_binary_file(
     file_path: Path,
     file_size: ByteSize,
     # NOTE: bigger files get created faster with bigger chunk_size
-    chunk_size: int = parse_obj_as(ByteSize, "1mib"),  # noqa: B008
+    chunk_size: int = parse_obj_as(ByteSize, "1mib"),
 ):
     async with aiofiles.open(file_path, mode="wb") as file:
         bytes_written = 0
@@ -134,11 +133,11 @@ async def _upload_local_dir_to_s3(
     # Since using moto to mock the S3 api, downloading is way to fast.
     # Progress behaves as expected with CEPH and AWS S3 backends.
 
-    progress_entries: list[float] = []
+    progress_entries: list[ProgressReport] = []
 
-    async def _report_progress_upload(progress_value: float) -> None:
-        print(">>>|", progress_value, "| ⏫")
-        progress_entries.append(progress_value)
+    async def _report_progress_upload(report: ProgressReport) -> None:
+        print(">>>|", report, "| ⏫")
+        progress_entries.append(report)
 
     async with ProgressBarData(
         num_steps=1,
@@ -164,8 +163,8 @@ async def _download_from_s3_to_local_dir(
     destination_dir: Path,
     faker: Faker,
 ) -> None:
-    async def _report_progress_download(progress_value: float) -> None:
-        print(">>>|", progress_value, "| ⏬")
+    async def _report_progress_download(report: ProgressReport) -> None:
+        print(">>>|", report, "| ⏬")
 
     async with ProgressBarData(
         num_steps=1,
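
The progress callbacks in both helpers now receive a ProgressReport object instead of a bare float. Below is a self-contained sketch of the new callback shape; the ProgressReport dataclass here is only a stand-in with assumed fields, since the real model comes from models_library.progress_bar and may differ.

import asyncio
from dataclasses import dataclass


@dataclass
class ProgressReport:
    # stand-in for models_library.progress_bar.ProgressReport; field names are assumed
    actual_value: float
    total: float


progress_entries: list[ProgressReport] = []


async def _report_progress_upload(report: ProgressReport) -> None:
    # the callback now receives the whole report, not just a float
    print(">>>|", report, "| ⏫")
    progress_entries.append(report)


async def main() -> None:
    # simulate a progress bar emitting three reports
    for step in range(1, 4):
        await _report_progress_upload(ProgressReport(actual_value=step, total=3))
    assert len(progress_entries) == 3


asyncio.run(main())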
@@ -368,7 +367,7 @@ async def test_overwrite_an_existing_file_and_sync_again(
     assert len(generated_file_names) > 0
 
     # get s3 reference link
-    directory_uuid = create_valid_file_uuid(f"{dir_locally_created_files}", Path(""))
+    directory_uuid = create_valid_file_uuid(f"{dir_locally_created_files}", Path())
     s3_directory_link = _fake_s3_link(r_clone_settings, directory_uuid)
 
     # sync local to remote and check
