|
| 1 | +import logging |
1 | 2 | from contextlib import suppress |
2 | 3 | from pathlib import Path |
3 | 4 | from typing import TypeAlias |
|
7 | 8 | from aws_library.s3 import S3MetaData, SimcoreS3API |
8 | 9 | from aws_library.s3._constants import STREAM_READER_CHUNK_SIZE |
9 | 10 | from aws_library.s3._models import S3ObjectKey |
| 11 | +from fastapi import FastAPI |
10 | 12 | from models_library.api_schemas_storage.storage_schemas import S3BucketName |
11 | 13 | from models_library.projects import ProjectID |
12 | 14 | from models_library.projects_nodes_io import ( |
|
25 | 27 | from ..constants import EXPORTS_S3_PREFIX |
26 | 28 | from ..exceptions.errors import FileMetaDataNotFoundError, ProjectAccessRightError |
27 | 29 | from ..models import FileMetaData, FileMetaDataAtDB, GenericCursor, PathMetaData |
| 30 | +from ..modules.db import get_db_engine |
28 | 31 | from ..modules.db.access_layer import AccessLayerRepository |
29 | 32 | from ..modules.db.file_meta_data import FileMetaDataRepository, TotalChildren |
| 33 | +from ..modules.db.projects import NodeIDStr, ProjectIDStr, ProjectRepository |
| 34 | +from ..modules.s3 import get_s3_client |
30 | 35 | from .utils import convert_db_to_model |
31 | 36 |
|
| 37 | +_logger = logging.getLogger(__name__) |
| 38 | + |
32 | 39 |
|
33 | 40 | async def _list_all_files_in_folder( |
34 | 41 | *, |
@@ -165,17 +172,58 @@ def _base_path_parent(base_path: UserSelectionStr, s3_object: S3ObjectKey) -> st |
165 | 172 | return f"{result}" |
166 | 173 |
|
167 | 174 |
|
| 175 | +def _get_project_ids(user_selection: set[UserSelectionStr]) -> list[ProjectID]: |
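| | + """Collect the ProjectID encoded as the first path segment of each user selection.""" |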
| 176 | + _logger.debug("user_selection=%s", user_selection) |
| 177 | + results = [] |
| 178 | + for selected in user_selection: |
| 179 | + project_id = ProjectID(Path(selected).parts[0]) |
| 180 | + results.append(project_id) |
| 181 | + return results |
| 182 | + |
| 183 | + |
| 184 | +def _replace_node_id_project_id_in_path( |
| 185 | + ids_names_map: dict[ProjectID, dict[ProjectIDStr | NodeIDStr, str]], path: str |
| 186 | +) -> str: |
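| | + """Replace the leading project-ID and node-ID segments of *path* with their display names from ids_names_map; "/" in a name is replaced by "_" so the name stays a single path segment.""" |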
| 187 | + path_parts = Path(path).parts |
| 188 | + if len(path_parts) == 0: |
| 189 | + return path |
| 190 | + |
| 191 | + if len(path_parts) == 1: |
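| | + # a single segment is a bare project ID: return the project's display name |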
| 192 | + return ids_names_map[ProjectID(path)][path].replace("/", "_") |
| 193 | + |
| 194 | + project_id_str = path_parts[0] |
| 195 | + project_id = ProjectID(project_id_str) |
| 196 | + node_id_str = path_parts[1] |
| 197 | + return "/".join( |
| 198 | + ( |
| 199 | + ids_names_map[project_id][project_id_str].replace("/", "_"), |
| 200 | + ids_names_map[project_id][node_id_str].replace("/", "_"), |
| 201 | + *path_parts[2:], |
| 202 | + ) |
| 203 | + ) |
| 204 | + |
| 205 | + |
168 | 206 | async def create_and_upload_export( |
169 | | - s3_client: SimcoreS3API, |
| 207 | + app: FastAPI, |
170 | 208 | bucket: S3BucketName, |
171 | 209 | *, |
172 | 210 | source_object_keys: set[tuple[UserSelectionStr, StorageFileID]], |
173 | 211 | destination_object_keys: StorageFileID, |
174 | 212 | progress_bar: ProgressBarData, |
175 | 213 | ) -> None: |
| 214 | + s3_client = get_s3_client(app) |
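| | + # ids_names_map: for each ProjectID, maps the project and node UUID strings to their display names |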
| 215 | + ids_names_map = await ProjectRepository.instance( |
| 216 | + get_db_engine(app) |
| 217 | + ).get_project_id_and_node_id_to_names_map( |
| 218 | + project_uuids=_get_project_ids(user_selection={x[0] for x in source_object_keys}) |
| 219 | + ) |
| 220 | + |
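| | + # pair each selected S3 object with an archive path built from project/node names instead of UUIDs |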
176 | 221 | archive_entries: ArchiveEntries = [ |
177 | 222 | ( |
178 | | - _base_path_parent(selection, s3_object), |
| 223 | + _base_path_parent( |
| 224 | + _replace_node_id_project_id_in_path(ids_names_map, selection), |
| 225 | + _replace_node_id_project_id_in_path(ids_names_map, s3_object), |
| 226 | + ), |
179 | 227 | await s3_client.get_bytes_streamer_from_object(bucket, s3_object), |
180 | 228 | ) |
181 | 229 | for (selection, s3_object) in source_object_keys |
|