Skip to content

Commit dbf42b6

Browse files
fix: project_nodes
1 parent 841593d commit dbf42b6

File tree

1 file changed

+15
-9
lines changed

services/storage/tests/unit/test_rpc_handlers_simcore_s3.py

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -230,9 +230,11 @@ async def test_copy_folders_from_valid_project_with_one_large_file(
230230
project_params: ProjectWithFilesParams,
231231
):
232232
# 1. create a src project with 1 large file
233-
src_project, _, src_projects_list = await random_project_with_files(project_params)
233+
src_project, src_project_nodes, src_projects_list = await random_project_with_files(
234+
project_params
235+
)
234236
# 2. create a dst project without files
235-
dst_project, nodes_map = clone_project_data(src_project)
237+
dst_project, _, nodes_map = clone_project_data(src_project, src_project_nodes)
236238
dst_project = await create_project(**dst_project)
237239
# copy the project files
238240
data = await _request_copy_folders(
@@ -241,7 +243,7 @@ async def test_copy_folders_from_valid_project_with_one_large_file(
241243
product_name,
242244
src_project,
243245
dst_project,
244-
nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()},
246+
nodes_map=nodes_map,
245247
)
246248
assert data == jsonable_encoder(
247249
await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"])
@@ -327,9 +329,11 @@ async def test_copy_folders_from_valid_project(
327329
project_params: ProjectWithFilesParams,
328330
):
329331
# 1. create a src project with some files
330-
src_project, _, src_projects_list = await random_project_with_files(project_params)
332+
src_project, src_project_nodes, src_projects_list = await random_project_with_files(
333+
project_params
334+
)
331335
# 2. create a dst project without files
332-
dst_project, nodes_map = clone_project_data(src_project)
336+
dst_project, _, nodes_map = clone_project_data(src_project, src_project_nodes)
333337
dst_project = await create_project(**dst_project)
334338
# copy the project files
335339
data = await _request_copy_folders(
@@ -338,7 +342,7 @@ async def test_copy_folders_from_valid_project(
338342
product_name,
339343
src_project,
340344
dst_project,
341-
nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()},
345+
nodes_map=nodes_map,
342346
)
343347
assert data == jsonable_encoder(
344348
await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"])
@@ -378,13 +382,14 @@ async def _create_and_delete_folders_from_project(
378382
user_id: UserID,
379383
product_name: ProductName,
380384
project: dict[str, Any],
385+
project_nodes: dict[NodeID, dict[str, Any]],
381386
initialized_app: FastAPI,
382387
project_db_creator: Callable,
383388
check_list_files: bool,
384389
*,
385390
client_timeout: datetime.timedelta = datetime.timedelta(seconds=60),
386391
) -> None:
387-
destination_project, nodes_map = clone_project_data(project)
392+
destination_project, _, nodes_map = clone_project_data(project, project_nodes)
388393
await project_db_creator(**destination_project)
389394

390395
# creating a copy
@@ -394,7 +399,7 @@ async def _create_and_delete_folders_from_project(
394399
product_name,
395400
project,
396401
destination_project,
397-
nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()},
402+
nodes_map=nodes_map,
398403
client_timeout=client_timeout,
399404
)
400405

@@ -500,7 +505,7 @@ async def test_create_and_delete_folders_from_project(
500505
mock_datcore_download,
501506
num_concurrent_calls: int,
502507
):
503-
project_in_db, _, _ = with_random_project_with_files
508+
project_in_db, project_nodes_in_db, _ = with_random_project_with_files
504509
# NOTE: here the point is to NOT have a limit on the number of calls!!
505510
await asyncio.gather(
506511
*[
@@ -510,6 +515,7 @@ async def test_create_and_delete_folders_from_project(
510515
user_id,
511516
product_name,
512517
project_in_db,
518+
project_nodes_in_db,
513519
initialized_app,
514520
create_project,
515521
check_list_files=False,

0 commit comments

Comments
 (0)