Skip to content

Commit ec24a11

Browse files
committed
moving copy to RPC
1 parent 6667795 commit ec24a11

File tree

2 files changed

+103
-123
lines changed

2 files changed

+103
-123
lines changed

services/storage/tests/unit/test_handlers_simcore_s3.py

Lines changed: 0 additions & 122 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,6 @@
1414

1515
import httpx
1616
import pytest
17-
import sqlalchemy as sa
18-
from aws_library.s3 import SimcoreS3API
1917
from faker import Faker
2018
from fastapi import FastAPI
2119
from models_library.api_schemas_storage.storage_schemas import (
@@ -43,7 +41,6 @@
4341
from servicelib.aiohttp import status
4442
from servicelib.fastapi.long_running_tasks.client import long_running_task_request
4543
from settings_library.s3 import S3Settings
46-
from simcore_postgres_database.storage_models import file_meta_data
4744
from simcore_service_storage.models import SearchFilesQueryParams
4845
from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
4946
from sqlalchemy.ext.asyncio import AsyncEngine
@@ -123,125 +120,6 @@ async def _request_copy_folders(
123120
pytest.fail(reason="Copy folders failed!")
124121

125122

126-
async def test_copy_folders_from_empty_project(
127-
initialized_app: FastAPI,
128-
client: httpx.AsyncClient,
129-
user_id: UserID,
130-
create_project: Callable[[], Awaitable[dict[str, Any]]],
131-
sqlalchemy_async_engine: AsyncEngine,
132-
storage_s3_client: SimcoreS3API,
133-
):
134-
# we will copy from src to dst
135-
src_project = await create_project()
136-
dst_project = await create_project()
137-
138-
data = await _request_copy_folders(
139-
initialized_app,
140-
client,
141-
user_id,
142-
src_project,
143-
dst_project,
144-
nodes_map={},
145-
)
146-
assert data == jsonable_encoder(dst_project)
147-
# check there is nothing in the dst project
148-
async with sqlalchemy_async_engine.connect() as conn:
149-
num_entries = await conn.scalar(
150-
sa.select(sa.func.count())
151-
.select_from(file_meta_data)
152-
.where(file_meta_data.c.project_id == dst_project["uuid"])
153-
)
154-
assert num_entries == 0
155-
156-
157-
@pytest.fixture
158-
def short_dsm_cleaner_interval(monkeypatch: pytest.MonkeyPatch) -> int:
159-
monkeypatch.setenv("STORAGE_CLEANER_INTERVAL_S", "1")
160-
return 1
161-
162-
163-
@pytest.mark.parametrize(
164-
"location_id",
165-
[SimcoreS3DataManager.get_location_id()],
166-
ids=[SimcoreS3DataManager.get_location_name()],
167-
indirect=True,
168-
)
169-
@pytest.mark.parametrize(
170-
"project_params",
171-
[
172-
ProjectWithFilesParams(
173-
num_nodes=1,
174-
allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("210Mib"),),
175-
allowed_file_checksums=(
176-
TypeAdapter(SHA256Str).validate_python(
177-
"0b3216d95ec5a36c120ba16c88911dcf5ff655925d0fbdbc74cf95baf86de6fc"
178-
),
179-
),
180-
workspace_files_count=0,
181-
),
182-
],
183-
ids=str,
184-
)
185-
async def test_copy_folders_from_valid_project_with_one_large_file(
186-
initialized_app: FastAPI,
187-
short_dsm_cleaner_interval: int,
188-
client: httpx.AsyncClient,
189-
user_id: UserID,
190-
create_project: Callable[[], Awaitable[dict[str, Any]]],
191-
sqlalchemy_async_engine: AsyncEngine,
192-
random_project_with_files: Callable[
193-
[ProjectWithFilesParams],
194-
Awaitable[
195-
tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]
196-
],
197-
],
198-
project_params: ProjectWithFilesParams,
199-
):
200-
# 1. create a src project with 1 large file
201-
src_project, src_projects_list = await random_project_with_files(project_params)
202-
# 2. create a dst project without files
203-
dst_project, nodes_map = clone_project_data(src_project)
204-
dst_project = await create_project(**dst_project)
205-
# copy the project files
206-
data = await _request_copy_folders(
207-
initialized_app,
208-
client,
209-
user_id,
210-
src_project,
211-
dst_project,
212-
nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()},
213-
)
214-
assert data == jsonable_encoder(
215-
await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"])
216-
)
217-
# check that file meta data was effectively copied
218-
for src_node_id in src_projects_list:
219-
dst_node_id = nodes_map.get(
220-
TypeAdapter(NodeIDStr).validate_python(f"{src_node_id}")
221-
)
222-
assert dst_node_id
223-
for src_file_id, src_file in src_projects_list[src_node_id].items():
224-
path: Any = src_file["path"]
225-
assert isinstance(path, Path)
226-
checksum: Any = src_file["sha256_checksum"]
227-
assert isinstance(checksum, str)
228-
await assert_file_meta_data_in_db(
229-
sqlalchemy_async_engine,
230-
file_id=TypeAdapter(SimcoreS3FileID).validate_python(
231-
f"{src_file_id}".replace(
232-
f"{src_project['uuid']}", dst_project["uuid"]
233-
).replace(f"{src_node_id}", f"{dst_node_id}")
234-
),
235-
expected_entry_exists=True,
236-
expected_file_size=path.stat().st_size,
237-
expected_upload_id=None,
238-
expected_upload_expiration_date=None,
239-
expected_sha256_checksum=TypeAdapter(SHA256Str).validate_python(
240-
checksum
241-
),
242-
)
243-
244-
245123
@pytest.mark.parametrize(
246124
"location_id",
247125
[SimcoreS3DataManager.get_location_id()],

services/storage/tests/unit/test_rpc_handlers_simcore_s3.py

Lines changed: 103 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import logging
33
from collections.abc import Awaitable, Callable
44
from copy import deepcopy
5+
from pathlib import Path
56
from typing import Any
67

78
import pytest
@@ -13,15 +14,27 @@
1314
from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobResult
1415
from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE
1516
from models_library.api_schemas_storage.storage_schemas import FoldersBody
16-
from models_library.projects_nodes_io import NodeID
17+
from models_library.basic_types import SHA256Str
18+
from models_library.projects_nodes_io import NodeID, NodeIDStr, SimcoreS3FileID
1719
from models_library.users import UserID
20+
from pydantic import ByteSize, TypeAdapter
1821
from pytest_simcore.helpers.logging_tools import log_context
22+
from pytest_simcore.helpers.storage_utils import (
23+
FileIDDict,
24+
ProjectWithFilesParams,
25+
get_updated_project,
26+
)
27+
from pytest_simcore.helpers.storage_utils_file_meta_data import (
28+
assert_file_meta_data_in_db,
29+
)
30+
from pytest_simcore.helpers.storage_utils_project import clone_project_data
1931
from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient
2032
from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import wait_and_get_result
2133
from servicelib.rabbitmq.rpc_interfaces.storage.simcore_s3 import (
2234
copy_folders_from_project,
2335
)
2436
from simcore_postgres_database.storage_models import file_meta_data
37+
from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
2538
from sqlalchemy.ext.asyncio import AsyncEngine
2639

2740
pytest_simcore_core_services_selection = ["postgres", "rabbit"]
@@ -145,3 +158,92 @@ async def test_copy_folders_from_empty_project(
145158
.where(file_meta_data.c.project_id == dst_project["uuid"])
146159
)
147160
assert num_entries == 0
161+
162+
163+
@pytest.fixture
164+
def short_dsm_cleaner_interval(monkeypatch: pytest.MonkeyPatch) -> int:
165+
monkeypatch.setenv("STORAGE_CLEANER_INTERVAL_S", "1")
166+
return 1
167+
168+
169+
@pytest.mark.parametrize(
170+
"location_id",
171+
[SimcoreS3DataManager.get_location_id()],
172+
ids=[SimcoreS3DataManager.get_location_name()],
173+
indirect=True,
174+
)
175+
@pytest.mark.parametrize(
176+
"project_params",
177+
[
178+
ProjectWithFilesParams(
179+
num_nodes=1,
180+
allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("210Mib"),),
181+
allowed_file_checksums=(
182+
TypeAdapter(SHA256Str).validate_python(
183+
"0b3216d95ec5a36c120ba16c88911dcf5ff655925d0fbdbc74cf95baf86de6fc"
184+
),
185+
),
186+
workspace_files_count=0,
187+
),
188+
],
189+
ids=str,
190+
)
191+
async def test_copy_folders_from_valid_project_with_one_large_file(
192+
initialized_app: FastAPI,
193+
short_dsm_cleaner_interval: int,
194+
storage_rabbitmq_rpc_client: RabbitMQRPCClient,
195+
user_id: UserID,
196+
product_name: str,
197+
create_project: Callable[[], Awaitable[dict[str, Any]]],
198+
sqlalchemy_async_engine: AsyncEngine,
199+
random_project_with_files: Callable[
200+
[ProjectWithFilesParams],
201+
Awaitable[
202+
tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]
203+
],
204+
],
205+
project_params: ProjectWithFilesParams,
206+
):
207+
# 1. create a src project with 1 large file
208+
src_project, src_projects_list = await random_project_with_files(project_params)
209+
# 2. create a dst project without files
210+
dst_project, nodes_map = clone_project_data(src_project)
211+
dst_project = await create_project(**dst_project)
212+
# copy the project files
213+
data = await _request_copy_folders(
214+
storage_rabbitmq_rpc_client,
215+
user_id,
216+
product_name,
217+
src_project,
218+
dst_project,
219+
nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()},
220+
)
221+
assert data == jsonable_encoder(
222+
await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"])
223+
)
224+
# check that file meta data was effectively copied
225+
for src_node_id in src_projects_list:
226+
dst_node_id = nodes_map.get(
227+
TypeAdapter(NodeIDStr).validate_python(f"{src_node_id}")
228+
)
229+
assert dst_node_id
230+
for src_file_id, src_file in src_projects_list[src_node_id].items():
231+
path: Any = src_file["path"]
232+
assert isinstance(path, Path)
233+
checksum: Any = src_file["sha256_checksum"]
234+
assert isinstance(checksum, str)
235+
await assert_file_meta_data_in_db(
236+
sqlalchemy_async_engine,
237+
file_id=TypeAdapter(SimcoreS3FileID).validate_python(
238+
f"{src_file_id}".replace(
239+
f"{src_project['uuid']}", dst_project["uuid"]
240+
).replace(f"{src_node_id}", f"{dst_node_id}")
241+
),
242+
expected_entry_exists=True,
243+
expected_file_size=path.stat().st_size,
244+
expected_upload_id=None,
245+
expected_upload_expiration_date=None,
246+
expected_sha256_checksum=TypeAdapter(SHA256Str).validate_python(
247+
checksum
248+
),
249+
)

0 commit comments

Comments (0)