|
7 | 7 | from contextlib import asynccontextmanager |
8 | 8 | from pathlib import Path |
9 | 9 | from random import choice, randint |
10 | | -from typing import Any |
| 10 | +from typing import Any, cast |
11 | 11 |
|
12 | 12 | import pytest |
13 | 13 | import sqlalchemy as sa |
14 | 14 | from faker import Faker |
15 | 15 | from models_library.basic_types import SHA256Str |
16 | 16 | from models_library.projects import ProjectAtDB, ProjectID |
17 | | -from models_library.projects_nodes_io import NodeID, SimcoreS3FileID |
| 17 | +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID |
18 | 18 | from models_library.users import UserID |
19 | 19 | from pydantic import ByteSize, TypeAdapter |
20 | 20 | from pytest_simcore.helpers.faker_factories import random_project, random_user |
@@ -259,6 +259,39 @@ async def _creator( |
259 | 259 | return _creator |
260 | 260 |
|
261 | 261 |
|
| 262 | +async def _upload_file_and_update_project( |
| 263 | + project_id: ProjectID, |
| 264 | + node_id: NodeID, |
| 265 | + *, |
| 266 | + file_name: str | None, |
| 267 | + file_id: StorageFileID | None, |
| 268 | + file_sizes: tuple[ByteSize, ...], |
| 269 | + file_checksums: tuple[SHA256Str, ...], |
| 270 | + node_to_files_mapping: dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], |
| 271 | + upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], |
| 272 | + create_simcore_file_id: Callable[ |
| 273 | + [ProjectID, NodeID, str, Path | None], SimcoreS3FileID |
| 274 | + ], |
| 275 | + faker: Faker, |
| 276 | +) -> None: |
| 277 | + if file_name is None: |
| 278 | + file_name = faker.file_name() |
| 279 | + file_id = create_simcore_file_id(project_id, node_id, file_name, None) |
| 280 | + checksum: SHA256Str = choice(file_checksums) # noqa: S311 |
| 281 | + src_file, _ = await upload_file( |
| 282 | + file_size=choice(file_sizes), # noqa: S311 |
| 283 | + file_name=file_name, |
| 284 | + file_id=file_id, |
| 285 | + sha256_checksum=checksum, |
| 286 | + ) |
| 287 | + assert file_name is not None |
| 288 | + assert file_id is not None |
| 289 | + node_to_files_mapping[node_id][file_id] = { |
| 290 | + "path": src_file, |
| 291 | + "sha256_checksum": checksum, |
| 292 | + } |
| 293 | + |
| 294 | + |
262 | 295 | @pytest.fixture |
263 | 296 | async def random_project_with_files( |
264 | 297 | sqlalchemy_async_engine: AsyncEngine, |
@@ -296,71 +329,66 @@ async def _creator( |
296 | 329 | ) -> tuple[ProjectAtDB, dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]]]: |
297 | 330 | assert len(file_sizes) == len(file_checksums) |
298 | 331 | project = await create_project(name="random-project") |
299 | | - src_projects_list: dict[ |
| 332 | + node_to_files_mapping: dict[ |
300 | 333 | NodeID, dict[SimcoreS3FileID, dict[str, Path | str]] |
301 | 334 | ] = {} |
302 | 335 | upload_tasks: deque[Awaitable] = deque() |
303 | 336 | for _node_index in range(num_nodes): |
304 | | - # NOTE: we put some more outputs in there to simulate a real case better |
305 | | - new_node_id = NodeID(f"{faker.uuid4()}") |
| 337 | + # Create a node with outputs (files and others) |
| 338 | + project_id = ProjectID(project["uuid"]) |
| 339 | + node_id = cast(NodeID, faker.uuid4(cast_to=None)) |
| 340 | + output3_file_name = faker.file_name() |
306 | 341 | output3_file_id = create_simcore_file_id( |
307 | | - ProjectID(project["uuid"]), |
308 | | - new_node_id, |
309 | | - faker.file_name(), |
310 | | - Path("outputs/output3"), |
| 342 | + project_id, node_id, output3_file_name, Path("outputs/output_3") |
311 | 343 | ) |
312 | | - src_node_id = await create_project_node( |
| 344 | + created_node_id = await create_project_node( |
313 | 345 | ProjectID(project["uuid"]), |
314 | | - new_node_id, |
| 346 | + node_id, |
315 | 347 | outputs={ |
316 | 348 | "output_1": faker.pyint(), |
317 | 349 | "output_2": faker.pystr(), |
318 | 350 | "output_3": f"{output3_file_id}", |
319 | 351 | }, |
320 | 352 | ) |
321 | | - assert src_node_id == new_node_id |
322 | | - |
323 | | - # upload the output 3 and some random other files at the root of each node |
324 | | - src_projects_list[src_node_id] = {} |
325 | | - checksum: SHA256Str = choice(file_checksums) # noqa: S311 |
326 | | - src_file, _ = await upload_file( |
327 | | - file_size=choice(file_sizes), # noqa: S311 |
328 | | - file_name=Path(output3_file_id).name, |
329 | | - file_id=output3_file_id, |
330 | | - sha256_checksum=checksum, |
331 | | - ) |
332 | | - src_projects_list[src_node_id][output3_file_id] = { |
333 | | - "path": src_file, |
334 | | - "sha256_checksum": checksum, |
335 | | - } |
336 | | - |
337 | | - async def _upload_file_and_update_project(project, src_node_id): |
338 | | - src_file_name = faker.file_name() |
339 | | - src_file_uuid = create_simcore_file_id( |
340 | | - ProjectID(project["uuid"]), src_node_id, src_file_name, None |
| 353 | + assert created_node_id == node_id |
| 354 | + |
| 355 | + node_to_files_mapping[created_node_id] = {} |
| 356 | + upload_tasks.append( |
| 357 | + _upload_file_and_update_project( |
| 358 | + project_id, |
| 359 | + node_id, |
| 360 | + file_name=output3_file_name, |
| 361 | + file_id=output3_file_id, |
| 362 | + file_sizes=file_sizes, |
| 363 | + file_checksums=file_checksums, |
| 364 | + upload_file=upload_file, |
| 365 | + create_simcore_file_id=create_simcore_file_id, |
| 366 | + faker=faker, |
| 367 | + node_to_files_mapping=node_to_files_mapping, |
341 | 368 | ) |
342 | | - checksum: SHA256Str = choice(file_checksums) # noqa: S311 |
343 | | - src_file, _ = await upload_file( |
344 | | - file_size=choice(file_sizes), # noqa: S311 |
345 | | - file_name=src_file_name, |
346 | | - file_id=src_file_uuid, |
347 | | - sha256_checksum=checksum, |
348 | | - ) |
349 | | - src_projects_list[src_node_id][src_file_uuid] = { |
350 | | - "path": src_file, |
351 | | - "sha256_checksum": checksum, |
352 | | - } |
| 369 | + ) |
353 | 370 |
|
354 | | - # add a few random files in the node storage |
| 371 | + # add a few random files in the node workspace |
355 | 372 | upload_tasks.extend( |
356 | 373 | [ |
357 | | - _upload_file_and_update_project(project, src_node_id) |
| 374 | + _upload_file_and_update_project( |
| 375 | + project_id, |
| 376 | + node_id, |
| 377 | + file_name=None, |
| 378 | + file_id=None, |
| 379 | + file_sizes=file_sizes, |
| 380 | + file_checksums=file_checksums, |
| 381 | + upload_file=upload_file, |
| 382 | + create_simcore_file_id=create_simcore_file_id, |
| 383 | + faker=faker, |
| 384 | + node_to_files_mapping=node_to_files_mapping, |
| 385 | + ) |
358 | 386 | for _ in range(randint(0, 3)) # noqa: S311 |
359 | 387 | ] |
360 | 388 | ) |
361 | 389 | await limited_gather(*upload_tasks, limit=10) |
362 | 390 |
|
363 | 391 | project = await get_updated_project(sqlalchemy_async_engine, project["uuid"]) |
364 | | - return project, src_projects_list |
| 392 | + return project, node_to_files_mapping |
365 | 393 |
|
366 | 394 | return _creator |