
Commit 8195ae8

Commit message: sonar

1 parent e44ac5d

File tree: 1 file changed (+30 −29)

services/storage/src/simcore_service_storage/datcore_dsm.py

Lines changed: 30 additions & 29 deletions
@@ -17,7 +17,6 @@
 from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID
 from models_library.users import UserID
 from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter, ValidationError
-from servicelib.utils import limited_as_completed
 
 from .constants import DATCORE_ID, DATCORE_STR
 from .dsm_factory import BaseDataManager
@@ -206,39 +205,41 @@ async def compute_path_total_size(self, user_id: UserID, *, path: Path) -> ByteSize:
         if dataset_size is not None:
             return dataset_size
 
-        # generic computation
+        # generic computation (slow and unoptimized - could be improved if necessary by using datcore data better)
         try:
-            paths, cursor, total_number = await self.list_paths(
-                user_id, file_filter=path, cursor=None, limit=50
-            )
             accumulated_size = ByteSize(0)
+            paths_to_process = [path]
 
-            next_folders: list[PathMetaData] = []
-            for p in paths:
-                if p.file_meta_data is not None:
-                    # this is a file
-                    assert (
-                        p.file_meta_data.file_size is not UNDEFINED_SIZE_TYPE
-                    )  # nosec
-                    assert isinstance(p.file_meta_data.file_size, ByteSize)  # nosec
-                    accumulated_size = ByteSize(
-                        accumulated_size + p.file_meta_data.file_size
-                    )
-                else:
-                    next_folders.append(p)
-            async for sbfolder_size_future in limited_as_completed(
-                (
-                    self.compute_path_total_size(user_id, path=sub_folder.path)
-                    for sub_folder in next_folders
-                ),
-                limit=3,
-            ):
-                size = await sbfolder_size_future
-                accumulated_size = ByteSize(accumulated_size + size)
+            while paths_to_process:
+                current_path = paths_to_process.pop()
+                paths, cursor, _ = await self.list_paths(
+                    user_id, file_filter=current_path, cursor=None, limit=50
+                )
+
+                while paths:
+                    for p in paths:
+                        if p.file_meta_data is not None:
+                            # this is a file
+                            assert (
+                                p.file_meta_data.file_size is not UNDEFINED_SIZE_TYPE
+                            )  # nosec
+                            assert isinstance(
+                                p.file_meta_data.file_size, ByteSize
+                            )  # nosec
+                            accumulated_size = ByteSize(
+                                accumulated_size + p.file_meta_data.file_size
+                            )
+                            continue
+                        paths_to_process.append(p.path)
+
+                    if cursor:
+                        paths, cursor, _ = await self.list_paths(
+                            user_id, file_filter=current_path, cursor=cursor, limit=50
+                        )
+                    else:
+                        break
 
             return accumulated_size
-            if len(paths) == 0:
-                return ByteSize(0)
 
         except ValidationError:
             # invalid path
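
The refactor above replaces the recursive fan-out over sub-folders (previously bounded by `limited_as_completed(..., limit=3)`) with a single sequential loop: an explicit stack of folders still to visit, plus cursor-based pagination over each folder's listing. It also drops the unreachable `if len(paths) == 0:` branch that sat after the `return`, which is presumably the kind of finding the "sonar" commit message refers to. The snippet below is a minimal, self-contained sketch of that traversal pattern only; `TREE`, `PAGE_SIZE`, and `fake_list_paths()` are hypothetical stand-ins for DatCore's real `list_paths()` API, and only the control flow mirrors the diff.

```python
# Sketch of the stack-plus-cursor traversal used in the new implementation.
# TREE and fake_list_paths() are hypothetical stand-ins, not the real API.
import asyncio

# folder -> list of (name, size); size None marks a sub-folder entry
TREE: dict[str, list[tuple[str, int | None]]] = {
    "root": [("a.bin", 100), ("sub", None)],
    "root/sub": [("b.bin", 25), ("c.bin", 75)],
}

PAGE_SIZE = 1  # tiny page size so the cursor branch is actually exercised


async def fake_list_paths(
    folder: str, cursor: int | None, limit: int = PAGE_SIZE
) -> tuple[list[tuple[str, int | None]], int | None]:
    """Return one page of entries and the cursor for the next page (or None)."""
    entries = TREE.get(folder, [])
    start = cursor or 0
    page = entries[start : start + limit]
    next_cursor = start + limit if start + limit < len(entries) else None
    return page, next_cursor


async def compute_total_size(path: str) -> int:
    accumulated_size = 0
    paths_to_process = [path]  # explicit stack instead of recursion

    while paths_to_process:
        current_path = paths_to_process.pop()
        page, cursor = await fake_list_paths(current_path, cursor=None)

        while page:
            for name, size in page:
                if size is not None:
                    # file entry: add its size and move on
                    accumulated_size += size
                    continue
                # folder entry: queue it for a later outer-loop iteration
                paths_to_process.append(f"{current_path}/{name}")

            if cursor is not None:
                # more pages in this folder: fetch the next one
                page, cursor = await fake_list_paths(current_path, cursor=cursor)
            else:
                break

    return accumulated_size


if __name__ == "__main__":
    assert asyncio.run(compute_total_size("root")) == 200
```

Compared with the recursive version, this keeps at most one listing call in flight and avoids deep recursion on heavily nested folders, at the cost of giving up concurrency across sub-folders.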
