@@ -11,10 +11,11 @@
 from collections.abc import Awaitable, Callable
 from pathlib import Path
 from typing import Any, TypeAlias
+from urllib.parse import quote
 
 import httpx
 import pytest
-from faker import Faker
+import sqlalchemy as sa
 from fastapi import FastAPI, status
 from fastapi_pagination.cursor import CursorPage
 from models_library.api_schemas_storage.storage_schemas import PathMetaDataGet
@@ -24,6 +25,8 @@
 from pytest_simcore.helpers.fastapi import url_from_operation_id
 from pytest_simcore.helpers.httpx_assert_checks import assert_status
 from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams
+from simcore_postgres_database.models.projects import projects
+from sqlalchemy.ext.asyncio import AsyncEngine
 
 pytest_simcore_core_services_selection = ["postgres"]
 pytest_simcore_ops_services_selection = ["adminer"]
@@ -143,7 +146,6 @@ async def test_list_paths_pagination( |
         dict[str, Any],
         dict[NodeID, dict[SimcoreS3FileID, FileIDDict]],
     ],
-    faker: Faker,
 ):
     project, list_of_files = with_random_project_with_files
     num_nodes = len(list(project["workbench"]))
@@ -361,3 +363,135 @@ async def test_list_paths( |
         expected_paths=expected_paths,
         check_total=False,
     )
+
+
+@pytest.mark.parametrize(
+    "project_params",
+    [
+        ProjectWithFilesParams(
+            num_nodes=1,
+            allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("0b"),),
+            workspace_files_count=10,
+        )
+    ],
+    ids=str,
+)
+async def test_list_paths_with_display_name_containing_slashes(
+    initialized_app: FastAPI,
+    client: httpx.AsyncClient,
+    location_id: LocationID,
+    user_id: UserID,
+    with_random_project_with_files: tuple[
+        dict[str, Any],
+        dict[NodeID, dict[SimcoreS3FileID, FileIDDict]],
+    ],
+    sqlalchemy_async_engine: AsyncEngine,
+):
+    project, list_of_files = with_random_project_with_files
+    project_name_with_slashes = "soméà$èq¨thing with/ slas/h/es/"
+    node_name_with_non_ascii = "my node / is not ascii: éàèù"
+    # adjust project to contain "difficult" characters
+    async with sqlalchemy_async_engine.begin() as conn:
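+        # RETURNING the updated columns lets the test verify the change in the same round trip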
+        result = await conn.execute(
+            sa.update(projects)
+            .where(projects.c.uuid == project["uuid"])
+            .values(name=project_name_with_slashes)
+            .returning(projects.c.name, projects.c.workbench)
+        )
+        row = result.one()
+        assert row.name == project_name_with_slashes
+        project_workbench = row.workbench
+        assert len(project_workbench) == 1
+        node = next(iter(project_workbench.values()))
+        node["label"] = node_name_with_non_ascii
+        result = await conn.execute(
+            sa.update(projects)
+            .where(projects.c.uuid == project["uuid"])
+            .values(workbench=project_workbench)
+            .returning(projects.c.name, projects.c.workbench)
+        )
+        row = result.one()
+
+    # ls the root
+    file_filter = None
+    expected_paths = [(Path(project["uuid"]), False)]
+
+    page_of_paths = await _assert_list_paths(
+        initialized_app,
+        client,
+        location_id,
+        user_id,
+        file_filter=file_filter,
+        expected_paths=expected_paths,
+    )
+
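+    # quote(..., safe="") percent-encodes "/" as well, so the whole project name stays a single path segment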
+    assert page_of_paths.items[0].display_path == Path(
+        quote(project_name_with_slashes, safe="")
+    ), "display path parts should be url encoded"
+
+    # ls the nodes to ensure / is still there between project and node
+    file_filter = Path(project["uuid"])
+    expected_paths = sorted(
+        ((file_filter / node_key, False) for node_key in project["workbench"]),
+        key=lambda x: x[0],
+    )
+    assert len(expected_paths) == 1, "test configuration problem"
+    page_of_paths = await _assert_list_paths(
+        initialized_app,
+        client,
+        location_id,
+        user_id,
+        file_filter=file_filter,
+        expected_paths=expected_paths,
+    )
+    assert page_of_paths.items[0].display_path == Path(
+        quote(project_name_with_slashes, safe="")
+    ) / quote(
+        node_name_with_non_ascii, safe=""
+    ), "display path parts should be url encoded"
+
+    # ls in the node workspace
+    selected_node_id = NodeID(random.choice(list(project["workbench"])))  # noqa: S311
+    selected_node_s3_keys = [
+        Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id]
+    ]
+    workspace_file_filter = file_filter / f"{selected_node_id}" / "workspace"
+    expected_paths = _filter_and_group_paths_one_level_deeper(
+        selected_node_s3_keys, workspace_file_filter
+    )
+    await _assert_list_paths(
+        initialized_app,
+        client,
+        location_id,
+        user_id,
+        file_filter=workspace_file_filter,
+        expected_paths=expected_paths,
+        check_total=False,
+    )
+
+    # ls deeper until we get to some files
+    while selected_subfolders := [p for p in expected_paths if p[1] is False]:
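+        # pick a random sub-folder (entries whose second element is False) and list one level deeper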
+        selected_path_filter = random.choice(selected_subfolders)  # noqa: S311
+        expected_paths = _filter_and_group_paths_one_level_deeper(
+            selected_node_s3_keys, selected_path_filter[0]
+        )
+        page_of_paths = await _assert_list_paths(
+            initialized_app,
+            client,
+            location_id,
+            user_id,
+            file_filter=selected_path_filter[0],
+            expected_paths=expected_paths,
+            check_total=False,
+        )
+
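+    # the expected display path is the url-encoded project and node names followed by the raw S3 sub-path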
+    expected_display_path = "/".join(
+        [
+            quote(project_name_with_slashes, safe=""),
+            quote(node_name_with_non_ascii, safe=""),
+            *(expected_paths[0][0].parts[2:]),
+        ],
+    )
+    assert page_of_paths.items[0].display_path == Path(
+        expected_display_path
+    ), "display path parts should be url encoded"