Skip to content

Commit 5841cf7

Browse files
authored
fix(clp-package): Use generic archive directory as mount target in native/dataset_manager.py to match path in metadata database (fixes #1526). (#1528)
1 parent 6d04055 commit 5841cf7

File tree

2 files changed

+12
-11
lines changed

2 files changed

+12
-11
lines changed

components/clp-package-utils/clp_package_utils/scripts/dataset_manager.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
from clp_py_utils.s3_utils import generate_container_auth_options
1919

2020
from clp_package_utils.general import (
21+
DockerMount,
22+
DockerMountType,
2123
dump_container_config,
2224
generate_container_config,
2325
generate_container_name,
@@ -138,7 +140,14 @@ def main(argv: List[str]) -> int:
138140

139141
necessary_mounts = [mounts.logs_dir]
140142
if clp_config.archive_output.storage.type == StorageType.FS:
141-
necessary_mounts.append(mounts.archives_output_dir)
143+
container_archive_output_config = container_clp_config.archive_output.model_copy(deep=True)
144+
container_archive_output_config.storage.transform_for_container()
145+
archives_output_dir_mount = DockerMount(
146+
DockerMountType.BIND,
147+
clp_config.archive_output.get_directory(),
148+
container_archive_output_config.get_directory(),
149+
)
150+
necessary_mounts.append(archives_output_dir_mount)
142151

143152
aws_mount, aws_env_vars = generate_container_auth_options(
144153
clp_config, ARCHIVE_MANAGER_ACTION_NAME

components/clp-package-utils/clp_package_utils/scripts/native/dataset_manager.py

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ def _try_deleting_archives(
113113
archive_storage_config = archive_output_config.storage
114114
storage_type = archive_storage_config.type
115115
if StorageType.FS == storage_type:
116-
_try_deleting_archives_from_fs(archive_output_config, dataset_archive_storage_dir)
116+
_try_deleting_archives_from_fs(dataset_archive_storage_dir)
117117
elif StorageType.S3 == storage_type:
118118
_try_deleting_archives_from_s3(
119119
archive_storage_config.s3_config, dataset_archive_storage_dir
@@ -122,16 +122,8 @@ def _try_deleting_archives(
122122
raise ValueError(f"Unsupported storage type: {storage_type}")
123123

124124

125-
def _try_deleting_archives_from_fs(
126-
archive_output_config: ArchiveOutput, dataset_archive_storage_dir: str
127-
) -> None:
128-
archives_dir = archive_output_config.get_directory()
125+
def _try_deleting_archives_from_fs(dataset_archive_storage_dir: str) -> None:
129126
dataset_archive_storage_path = Path(dataset_archive_storage_dir).resolve()
130-
if not dataset_archive_storage_path.is_relative_to(archives_dir):
131-
raise ValueError(
132-
f"'{dataset_archive_storage_path}' is not within top-level archive storage directory"
133-
f" '{archives_dir}'"
134-
)
135127

136128
if not dataset_archive_storage_path.exists():
137129
logger.debug(f"'{dataset_archive_storage_path}' doesn't exist.")

0 commit comments

Comments (0)