diff --git a/src/dda/cli/env/dev/fs/__init__.py b/src/dda/cli/env/dev/fs/__init__.py new file mode 100644 index 00000000..803eb829 --- /dev/null +++ b/src/dda/cli/env/dev/fs/__init__.py @@ -0,0 +1,13 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. +# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +from dda.cli.base import dynamic_group + + +@dynamic_group( + short_help="Interact with the environment's filesystem", +) +def cmd() -> None: + pass diff --git a/src/dda/cli/env/dev/fs/export/__init__.py b/src/dda/cli/env/dev/fs/export/__init__.py new file mode 100644 index 00000000..a9ac385c --- /dev/null +++ b/src/dda/cli/env/dev/fs/export/__init__.py @@ -0,0 +1,73 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. +# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +from typing import TYPE_CHECKING + +import click + +from dda.cli.base import dynamic_command, pass_app +from dda.cli.env.dev.utils import option_env_type +from dda.utils.fs import Path + +if TYPE_CHECKING: + from dda.cli.application import Application + + +@dynamic_command( + short_help="""Export files and directories from a developer environment""", +) +@option_env_type() +@click.option("--id", "instance", default="default", help="Unique identifier for the environment") +@click.argument("sources", nargs=-1, required=True) +@click.argument("destination", required=True, type=click.Path(resolve_path=True, path_type=Path)) +@click.option("--recursive", "-r", is_flag=True, help="Export files and directories recursively.") +@click.option( + "--force", + "-f", + is_flag=True, + help="Overwrite existing files. Without this option, an error will be raised if the destination file already exists.", +) +@click.option( + "--mkpath", is_flag=True, help="Create the destination directories and their parents if they do not exist." 
+) +@pass_app +def cmd( + app: Application, + *, + env_type: str, + instance: str, + sources: tuple[str, ...], # Passed as string since they are inside the env filesystem + destination: Path, + recursive: bool, + force: bool, + mkpath: bool, +) -> None: + """ + Export files and directories from a developer environment, using an interface similar to `cp`. + The last path specified is the destination directory on the host filesystem. + """ + from dda.env.dev import get_dev_env + from dda.env.models import EnvironmentState + + env = get_dev_env(env_type)( + app=app, + name=env_type, + instance=instance, + ) + status = env.status() + + # TODO: This might end up depending on the environment type. + # For `linux-container` though, `docker cp` also works on stopped containers. + possible_states = {EnvironmentState.STARTED, EnvironmentState.STOPPED} + if status.state not in possible_states: + app.abort( + f"Developer environment `{env_type}` is in state `{status.state}`, must be one of: " + f"{', '.join(sorted(possible_states))}" + ) + + try: + env.export_files(sources, destination, recursive, force, mkpath) + except Exception as error: # noqa: BLE001 + app.abort(f"Failed to export files: {error}") diff --git a/src/dda/cli/env/dev/fs/import/__init__.py b/src/dda/cli/env/dev/fs/import/__init__.py new file mode 100644 index 00000000..18cdc76d --- /dev/null +++ b/src/dda/cli/env/dev/fs/import/__init__.py @@ -0,0 +1,69 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. 
+# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +from typing import TYPE_CHECKING + +import click + +from dda.cli.base import dynamic_command, pass_app +from dda.cli.env.dev.utils import option_env_type +from dda.utils.fs import Path + +if TYPE_CHECKING: + from dda.cli.application import Application + + +@dynamic_command(short_help="""Import files and directories into a developer environment""") +@option_env_type() +@click.option("--id", "instance", default="default", help="Unique identifier for the environment") +@click.argument("sources", nargs=-1, required=True, type=click.Path(exists=True, resolve_path=True, path_type=Path)) +@click.argument("destination", required=True) +@click.option("--recursive", "-r", is_flag=True, help="Import files and directories recursively.") +@click.option( + "--force", + "-f", + is_flag=True, + help="Overwrite existing files. Without this option, an error will be raised if the destination file already exists.", +) +@click.option( + "--mkpath", is_flag=True, help="Create the destination directories and their parents if they do not exist." +) +@pass_app +def cmd( + app: Application, + *, + env_type: str, + instance: str, + sources: tuple[Path, ...], + destination: str, # Passed as string since it is inside the env filesystem + recursive: bool, + force: bool, + mkpath: bool, +) -> None: + """ + Import files and directories into a developer environment, using an interface similar to `cp`. + The last path specified is the destination directory inside the environment. 
+ """ + from dda.env.dev import get_dev_env + from dda.env.models import EnvironmentState + + env = get_dev_env(env_type)( + app=app, + name=env_type, + instance=instance, + ) + status = env.status() + + possible_states = {EnvironmentState.STARTED} + if status.state not in possible_states: + app.abort( + f"Developer environment `{env_type}` is in state `{status.state}`, must be one of: " + f"{', '.join(sorted(possible_states))}" + ) + + try: + env.import_files(sources, destination, recursive, force, mkpath) + except Exception as error: # noqa: BLE001 + app.abort(f"Failed to import files: {error}") diff --git a/src/dda/cli/env/dev/fs/localimport/__init__.py b/src/dda/cli/env/dev/fs/localimport/__init__.py new file mode 100644 index 00000000..be5b8323 --- /dev/null +++ b/src/dda/cli/env/dev/fs/localimport/__init__.py @@ -0,0 +1,35 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. +# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +import click + +from dda.cli.base import dynamic_command +from dda.utils.fs import Path + + +@dynamic_command(short_help="""Internal command used to call import_from_dir in dev envs.""", hidden=True) +@click.argument( + "source", required=True, type=click.Path(exists=True, resolve_path=True, file_okay=False, path_type=Path) +) +@click.argument("destination", required=True, type=click.Path(resolve_path=True, path_type=Path)) +# Use arguments instead of options to enforce the idea that these are required +@click.argument("recursive", required=True, type=bool) +@click.argument("force", required=True, type=bool) +@click.argument("mkpath", required=True, type=bool) +def cmd( + *, + source: Path, + destination: Path, + recursive: bool, + force: bool, + mkpath: bool, +) -> None: + """ + Internal command used to call import_from_dir in dev envs. + This allows us to use the same semantics for importing files and directories into a dev env as for exporting them on the host filesystem. 
+ """ + from dda.env.dev.fs import import_from_dir + + import_from_dir(source, destination, recursive=recursive, force=force, mkpath=mkpath) diff --git a/src/dda/env/dev/fs.py b/src/dda/env/dev/fs.py new file mode 100644 index 00000000..34f8087b --- /dev/null +++ b/src/dda/env/dev/fs.py @@ -0,0 +1,81 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. +# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dda.utils.fs import Path + + +def determine_final_copy_target(source_name: str, source_is_dir: bool, destination_spec: Path) -> Path: # noqa: FBT001 + """ + Determines the final target for a copy operation, given a destination specification and some details about the source. + For example: + - f("file.txt", False, "/tmp/some-dir") -> "/tmp/some-dir/file.txt" (move into directory) + - f("file.txt", False, "/tmp/new-file.txt") -> "/tmp/new-file.txt" (rename file) + - f("some-dir", True, "/tmp/some-dir") -> "/tmp/some-dir/some-dir" (move directory into directory) + + Parameters: + - source_name: The name of the source file or directory. The source is usually inside the env filesystem, not the host. + - source_is_dir: Whether the source is a directory. + - destination_spec: The destination specification, which can be a directory or a file. The destination is usually on the host filesystem. + + Returns: + - The final target path. 
+ """ + + if destination_spec.is_dir(): + # The destination exists and is a directory or a symlink to one + # Always move the source inside it + # TODO: Add a check if destination_spec / source.name is an already-existing file or directory + # Currently shutil.move will fail with an ugly error message when we eventually call it + return destination_spec / source_name + + if destination_spec.is_file(): + # The destination exists and is a file + if source_is_dir: + # Never overwrite a file with a directory + msg = f"Refusing to overwrite existing file with directory: {destination_spec}" + raise ValueError(msg) + # Source and destination are both files - rename + return destination_spec + + # The destination does not exist, assume we want it exactly there + return destination_spec + + +def handle_overwrite(dest: Path, *, force: bool) -> None: + if not dest.exists(): + return + + if dest.is_dir(): + msg = f"Refusing to overwrite directory {dest}." + raise ValueError(msg) + + if not force: + msg = f"Refusing to overwrite existing file: {dest} (force flag is not set)." + raise ValueError(msg) + + dest.unlink() + + +def import_from_dir(source_dir: Path, destination_spec: Path, *, recursive: bool, force: bool, mkpath: bool) -> None: + """ + Import files and directories from a given directory into a destination directory on the "host" filesystem. + "Host" in this context refers to the environment `dda` is being executed in: if that is inside of a dev env, then we mean the dev env's file system. 
+ """ + from shutil import move + + if mkpath: + destination_spec.ensure_dir() + + for element in source_dir.iterdir(): + if not recursive and element.is_dir(): + msg = "Refusing to copy directories as recursive flag is not set" + raise ValueError(msg) + + final_target = determine_final_copy_target(element.name, element.is_dir(), destination_spec) + handle_overwrite(final_target, force=force) + move(element, final_target) diff --git a/src/dda/env/dev/interface.py b/src/dda/env/dev/interface.py index 76058186..cd81afb5 100644 --- a/src/dda/env/dev/interface.py +++ b/src/dda/env/dev/interface.py @@ -163,6 +163,48 @@ def launch_shell(self, *, repo: str | None = None) -> NoReturn: [configured repository][dda.env.dev.interface.DeveloperEnvironmentConfig.repos]. """ + @abstractmethod + def export_files( + self, + sources: tuple[str, ...], # Passed as string since they are inside the env filesystem + destination: Path, + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: + """ + This method exports files from the developer environment to the host filesystem. + + Parameters: + sources: The paths to files/directories in the developer environment to export. + destination: The destination directory on the host filesystem. + recursive: Whether to export files and directories recursively. If False, all sources must be files. + force: Whether to overwrite existing files. Without this option, an error will be raised if the destination file/directory already exists. + mkpath: Whether to create the destination directories and their parents if they do not exist. 
+ """ + raise NotImplementedError + + @abstractmethod + def import_files( + self, + sources: tuple[Path, ...], + destination: str, # Passed as string since it is inside the env filesystem + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: + """ + This method imports files from the host filesystem into the developer environment. + + Parameters: + sources: The paths to files/directories on the host filesystem to import. + destination: The destination directory in the developer environment. + recursive: Whether to import files and directories recursively. If False, all sources must be files. + force: Whether to overwrite existing files. Without this option, an error will be raised if the destination file/directory already exists. + mkpath: Whether to create the destination directories and their parents if they do not exist. + """ + raise NotImplementedError + def launch_gui(self) -> NoReturn: """ This method starts an interactive GUI inside the developer environment using e.g. RDP or VNC. 
diff --git a/src/dda/env/dev/types/linux_container.py b/src/dda/env/dev/types/linux_container.py index 31f10ea4..bfb15b6c 100644 --- a/src/dda/env/dev/types/linux_container.py +++ b/src/dda/env/dev/types/linux_container.py @@ -10,6 +10,7 @@ import msgspec +from dda.env.dev.fs import import_from_dir from dda.env.dev.interface import DeveloperEnvironmentConfig, DeveloperEnvironmentInterface from dda.utils.git.constants import GitEnvVars @@ -19,6 +20,7 @@ from dda.tools.docker import Docker from dda.utils.container.model import Mount from dda.utils.editors.interface import EditorInterface + from dda.utils.fs import Path class LinuxContainerConfig(DeveloperEnvironmentConfig): @@ -175,6 +177,8 @@ def start(self) -> None: if self.config.arch is not None: command.extend(("--platform", f"linux/{self.config.arch}")) + command.extend(("-v", f"{self.shared_dir}:/.shared")) + for shared_shell_file in self.shell.collect_shared_files(): unix_path = shared_shell_file.relative_to(self.global_shared_dir).as_posix() command.extend(("-v", f"{shared_shell_file}:{self.home_dir}/.shared/{unix_path}")) @@ -445,3 +449,68 @@ def repo_path(self, repo: str | None) -> str: repo = self.default_repo return f"{self.home_dir}/repos/{repo}" + + def _docker_cp(self, source: str, destination: str, cwd: Path | None = None) -> None: + # TODO: Make this a proper method on the Docker tool + self.docker.wait( + ["cp", source, destination], + message=f"Copying file or directory: {source}", + cwd=cwd, + ) + + def export_files( + self, + sources: tuple[str, ...], + destination: Path, + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: + from dda.utils.fs import temp_directory + + # 1. Create a temporary directory on the host filesystem + with temp_directory() as wd: + # 2. 
Copy the files from the container to the temporary directory using `docker cp` + for source in sources: + self._docker_cp(self.container_name + ":" + source, os.path.basename(source), cwd=wd) + + # 3. Import from the temporary directory into the final destination + import_from_dir(wd, destination, recursive=recursive, force=force, mkpath=mkpath) + + def import_files( + self, + sources: tuple[Path, ...], + destination: str, + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: + from shutil import copy2, copytree + + from dda.utils.fs import temp_directory + + # 1. Create a temporary directory in a location that is bind-mounted into the container + with temp_directory(dir=self.shared_dir) as temp_dir: + # 2. Copy the files from the source to the shared directory + for source in sources: + if source.is_dir(): + copytree(source, temp_dir / source.name) + else: + copy2(source, temp_dir / source.name) + + # 3. Run `dda env dev fs localimport` inside the dev env so that the files are copied from this shared directory into their final destination + self.app.subprocess.wait( + self.construct_command([ + "dda", + "env", + "dev", + "fs", + "localimport", + f"/.shared/{temp_dir.name}", # Source = shared directory in the dev env + destination, # Destination = final destination in the dev env + str(recursive), + str(force), + str(mkpath), + ]), + message="Importing files into the dev env...", + ) diff --git a/src/dda/utils/fs.py b/src/dda/utils/fs.py index cb84481e..d32b1f4d 100644 --- a/src/dda/utils/fs.py +++ b/src/dda/utils/fs.py @@ -180,13 +180,15 @@ def hexdigest(self, *, algorithm: str = "sha256", buffer_size: int | None = None @contextmanager -def temp_directory() -> Generator[Path, None, None]: +def temp_directory(dir: str | Path | None = None) -> Generator[Path, None, None]: # noqa: A002 """ A context manager that creates a temporary directory and yields a path to it. Example: ```python with temp_directory() as td: ... 
+ with temp_directory("/tmp") as td: + ... ``` Yields: @@ -194,7 +196,7 @@ def temp_directory() -> Generator[Path, None, None]: """ from tempfile import TemporaryDirectory - with TemporaryDirectory() as d: + with TemporaryDirectory(dir=dir) as d: yield Path(d).resolve() diff --git a/tests/env/dev/fixtures/fs_tests/file_root.txt b/tests/env/dev/fixtures/fs_tests/file_root.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/file_root.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/fixtures/fs_tests/file_root2.txt b/tests/env/dev/fixtures/fs_tests/file_root2.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/file_root2.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/fixtures/fs_tests/folder1/file_deep1.txt b/tests/env/dev/fixtures/fs_tests/folder1/file_deep1.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/folder1/file_deep1.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/fixtures/fs_tests/folder1/subfolder1/file_very_deep1.txt b/tests/env/dev/fixtures/fs_tests/folder1/subfolder1/file_very_deep1.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/folder1/subfolder1/file_very_deep1.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/fixtures/fs_tests/folder1/subfolder2/file_very_deep2.txt b/tests/env/dev/fixtures/fs_tests/folder1/subfolder2/file_very_deep2.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/folder1/subfolder2/file_very_deep2.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/fixtures/fs_tests/folder2/file_deep2.txt b/tests/env/dev/fixtures/fs_tests/folder2/file_deep2.txt new file mode 100644 index 00000000..5a18cd2f --- /dev/null +++ b/tests/env/dev/fixtures/fs_tests/folder2/file_deep2.txt @@ -0,0 +1 @@ +source diff --git a/tests/env/dev/test_fs.py b/tests/env/dev/test_fs.py new file 
mode 100644 index 00000000..6f9fa221 --- /dev/null +++ b/tests/env/dev/test_fs.py @@ -0,0 +1,313 @@ +# SPDX-FileCopyrightText: 2024-present Datadog, Inc. +# +# SPDX-License-Identifier: MIT +from __future__ import annotations + +import pytest + +from dda.env.dev.fs import determine_final_copy_target, handle_overwrite, import_from_dir +from dda.utils.fs import Path + +pytestmark = [pytest.mark.usefixtures("private_storage")] + + +@pytest.fixture +def test_files_root(): + """Folder containing test files to be copied.""" + return Path(__file__).parent / "fixtures" / "fs_tests" + + +@pytest.fixture +def test_source_dir(temp_dir, test_files_root): + """Directory containing source files (simulates intermediate directory from which we import things).""" + import shutil + + source_dir = temp_dir / "source_files" + source_dir.ensure_dir() + # Copy test files into the source directory + for item in test_files_root.iterdir(): + if item.is_dir(): + shutil.copytree(str(item), str(source_dir / item.name)) + else: + shutil.copy2(str(item), str(source_dir / item.name)) + return source_dir + + +@pytest.fixture +def make_subset_dir(temp_dir, test_source_dir): + """Factory fixture to create a subset directory with specific source files.""" + import shutil + + def _make_subset(sources: list[str], name: str = "subset") -> Path: + """Create a directory containing only the specified source files. 
+ + Args: + sources: List of file/directory names to include in the subset + name: Name for the subset directory (default: "subset") + + Returns: + Path to the created subset directory + """ + subset_dir = temp_dir / name + subset_dir.ensure_dir() + for source in sources: + source_path = test_source_dir / source + if source_path.is_dir(): + shutil.copytree(str(source_path), str(subset_dir / source)) + else: + shutil.copy2(str(source_path), str(subset_dir / source)) + return subset_dir + + return _make_subset + + +@pytest.fixture +def test_target_directory(temp_dir): + """Directory where the test files should be copied to.""" + res = temp_dir / "test_target" + res.ensure_dir() + return res + + +@pytest.fixture +def prepare_destination(test_target_directory): + """Factory fixture to prepare a destination path, optionally creating intermediate directories.""" + + def _prepare(destination: str = "", *, create_intermediates: bool = True) -> Path: + """Prepare a destination path for import operations. 
+ + Args: + destination: Relative destination path (empty string means use root target directory) + create_intermediates: If True, create intermediate directories + + Returns: + The prepared destination path + """ + dest_path = test_target_directory / destination if destination else test_target_directory + if create_intermediates and destination and not dest_path.exists(): + dest_path.ensure_dir() + return dest_path + + return _prepare + + +class TestDetermineFinalCopyTarget: + def test_file_into_existing_directory(self, temp_dir): + """When destination is an existing directory, the file should be placed inside it.""" + result = determine_final_copy_target("file.txt", False, temp_dir) + assert result == temp_dir / "file.txt" + + def test_file_to_nonexistent_path(self, temp_dir): + """When destination doesn't exist, it should be used as-is (rename case).""" + nonexistent = temp_dir / "new_name.txt" + result = determine_final_copy_target("file.txt", False, nonexistent) + assert result == nonexistent + + def test_directory_into_existing_directory(self, temp_dir): + """When source is a directory and destination exists, place it inside.""" + result = determine_final_copy_target("mydir", True, temp_dir) + assert result == temp_dir / "mydir" + + def test_directory_to_nonexistent_path(self, temp_dir): + """When source is a directory and destination doesn't exist, use destination as-is.""" + nonexistent = temp_dir / "new_dir" + result = determine_final_copy_target("mydir", True, nonexistent) + assert result == nonexistent + + def test_directory_to_existing_file_fails(self, temp_dir): + """Should raise error when trying to overwrite a file with a directory.""" + existing_file = temp_dir / "existing_file.txt" + existing_file.write_text("content") + + with pytest.raises(ValueError, match="Refusing to overwrite existing file with directory"): + determine_final_copy_target("mydir", True, existing_file) + + def test_file_to_existing_file(self, temp_dir): + """When destination is 
an existing file, it should be used for renaming.""" + existing_file = temp_dir / "existing_file.txt" + existing_file.write_text("content") + + result = determine_final_copy_target("file.txt", False, existing_file) + assert result == existing_file + + +class TestHandleOverwrite: + """Test the handle_overwrite function.""" + + def test_nonexistent_destination_passes(self, test_target_directory): + """Should pass without errors when destination doesn't exist.""" + nonexistent = test_target_directory / "nonexistent.txt" + handle_overwrite(nonexistent, force=False) # Should not raise + + def test_existing_file_without_force_fails(self, test_target_directory): + """Should raise error when trying to overwrite without force flag.""" + existing_file = test_target_directory / "existing_file.txt" + existing_file.write_text("content") + + with pytest.raises(ValueError, match="Refusing to overwrite existing file:.* \\(force flag is not set\\)"): + handle_overwrite(existing_file, force=False) + + def test_existing_file_with_force_succeeds(self, test_target_directory): + """Should delete the file when force flag is set.""" + existing_file = test_target_directory / "existing_file.txt" + existing_file.write_text("content") + + handle_overwrite(existing_file, force=True) + assert not existing_file.exists() + + def test_existing_directory_always_fails(self, test_target_directory): + """Should raise error when destination is a directory, even with force.""" + existing_dir = test_target_directory / "existing_dir" + existing_dir.ensure_dir() + + with pytest.raises(ValueError, match="Refusing to overwrite directory"): + handle_overwrite(existing_dir, force=False) + + with pytest.raises(ValueError, match="Refusing to overwrite directory"): + handle_overwrite(existing_dir, force=True) + + +class TestImportFromDir: + """Test the import_from_dir function with various scenarios.""" + + @pytest.mark.parametrize( + ("sources", "destination", "expected"), + [ + pytest.param(["file_root.txt"], 
"", ["file_root.txt"], id="single_file"), + pytest.param(["file_root.txt"], "file_renamed.txt", ["file_renamed.txt"], id="file_rename"), + pytest.param( + ["file_root.txt", "file_root2.txt"], "", ["file_root.txt", "file_root2.txt"], id="multiple_files" + ), + pytest.param( + ["folder1"], + "", + ["folder1", "folder1/file_deep1.txt", "folder1/subfolder1", "folder1/subfolder2"], + id="directory", + ), + pytest.param( + ["file_root.txt", "folder1", "file_root2.txt"], + "", + ["file_root.txt", "file_root2.txt", "folder1", "folder1/file_deep1.txt"], + id="mixed_files_and_directories", + ), + pytest.param( + ["file_root.txt", "folder1"], + "subdir", + ["subdir/file_root.txt", "subdir/folder1", "subdir/folder1/file_deep1.txt"], + id="into_subdir", + ), + ], + ) + def test_import_into_empty_directory( + self, make_subset_dir, prepare_destination, test_target_directory, sources, destination, expected + ): + """Test importing various combinations of files and directories.""" + subset_dir = make_subset_dir(sources) + destination_path = prepare_destination(destination) + + import_from_dir(subset_dir, destination_path, recursive=True, force=False, mkpath=False) + + for expected_file in expected: + assert (test_target_directory / expected_file).exists() + # Verify content for files (not directories) + file_path = test_target_directory / expected_file + if file_path.is_file() and "renamed" not in expected_file: + assert file_path.read_text().strip() == "source" + + class TestRecursiveArg: + """Tests for the recursive flag.""" + + def test_directory_fails_without_flag(self, make_subset_dir, test_target_directory): + """Should raise error when trying to copy directory without recursive flag.""" + subset_dir = make_subset_dir(["folder1"], name="subset_dir_only") + + with pytest.raises(ValueError, match="Refusing to copy directories as recursive flag is not set"): + import_from_dir(subset_dir, test_target_directory, recursive=False, force=False, mkpath=False) + + def 
test_multiple_directories(self, make_subset_dir, test_target_directory): + """Should successfully copy multiple directories with recursive flag.""" + subset_dir = make_subset_dir(["folder1", "folder2"], name="subset_multi_dir") + + import_from_dir(subset_dir, test_target_directory, recursive=True, force=False, mkpath=False) + + assert (test_target_directory / "folder1").exists() + assert (test_target_directory / "folder1" / "file_deep1.txt").exists() + assert (test_target_directory / "folder2").exists() + assert (test_target_directory / "folder2" / "file_deep2.txt").exists() + + class TestForceArg: + """Tests for the force flag.""" + + def test_overwrite_fails_without_flag(self, make_subset_dir, test_target_directory): + """Should raise error when trying to overwrite without force flag.""" + subset_dir = make_subset_dir(["file_root.txt"], name="subset_single") + + # Create existing file at destination + existing_file = test_target_directory / "file_root.txt" + existing_file.write_text("existing content") + + with pytest.raises(ValueError, match="Refusing to overwrite existing file:.* \\(force flag is not set\\)"): + import_from_dir(subset_dir, test_target_directory, recursive=False, force=False, mkpath=False) + + assert existing_file.read_text() == "existing content" + + def test_overwrite_succeeds_with_flag(self, make_subset_dir, test_target_directory): + """Should successfully overwrite files when force flag is set.""" + subset_dir = make_subset_dir(["file_root.txt"], name="subset_single_overwrite") + + # Create existing file at destination + existing_file = test_target_directory / "file_root.txt" + existing_file.write_text("existing content") + + import_from_dir(subset_dir, test_target_directory, recursive=False, force=True, mkpath=False) + + assert existing_file.read_text().strip() == "source" + + class TestMkpathArg: + """Tests for the mkpath flag.""" + + def test_nonexistent_path_fails_without_mkpath(self, make_subset_dir, test_target_directory): + """Should 
raise error when destination doesn't exist and mkpath is False.""" + subset_dir = make_subset_dir(["file_root.txt"], name="subset_mkpath_fail") + nonexistent_dir = test_target_directory / "nonexistent" / "deep" / "path" + + with pytest.raises(FileNotFoundError): + import_from_dir(subset_dir, nonexistent_dir, recursive=False, force=False, mkpath=False) + + def test_nonexistent_path_succeeds_with_mkpath(self, make_subset_dir, test_target_directory): + """Should create intermediate directories when mkpath is True.""" + subset_dir = make_subset_dir(["file_root.txt"], name="subset_mkpath_success") + nonexistent_dir = test_target_directory / "nonexistent" / "deep" / "path" + + import_from_dir(subset_dir, nonexistent_dir, recursive=False, force=False, mkpath=True) + + assert nonexistent_dir.exists() + assert (nonexistent_dir / "file_root.txt").exists() + assert (nonexistent_dir / "file_root.txt").read_text().strip() == "source" + + class TestExistingElements: + """Tests for importing to a directory that already contains stuff.""" + + def test_directory_to_existing_directory(self, make_subset_dir, test_target_directory): + """Should place source directory inside existing directory with stuff in it.""" + subset_dir = make_subset_dir(["folder1"], name="subset_into_existing") + + (test_target_directory / "existing_dir").ensure_dir() + (test_target_directory / "existing_dir" / "existing_file.txt").write_text("existing content") + + import_from_dir( + subset_dir, test_target_directory / "existing_dir", recursive=True, force=False, mkpath=False + ) + + assert (test_target_directory / "existing_dir" / "folder1").exists() + assert (test_target_directory / "existing_dir" / "folder1" / "file_deep1.txt").exists() + + def test_directory_to_existing_file_fails(self, make_subset_dir, test_target_directory): + """Should raise error when trying to place directory where a file exists.""" + subset_dir = make_subset_dir(["folder1"], name="subset_dir_to_file") + + existing_file = 
test_target_directory / "some_file.txt" + existing_file.write_text("existing content") + + with pytest.raises(ValueError, match="Refusing to overwrite existing file with directory"): + import_from_dir(subset_dir, existing_file, recursive=True, force=False, mkpath=False) diff --git a/tests/env/dev/test_interface.py b/tests/env/dev/test_interface.py index 66ea9b73..0a10a827 100644 --- a/tests/env/dev/test_interface.py +++ b/tests/env/dev/test_interface.py @@ -13,6 +13,7 @@ if TYPE_CHECKING: from dda.utils.editors.interface import EditorInterface + from dda.utils.fs import Path pytestmark = [pytest.mark.usefixtures("private_storage")] @@ -33,6 +34,24 @@ def code(self, *, editor: EditorInterface, repo: str | None = None) -> None: ... def run_command(self, command: list[str], *, repo: str | None = None) -> None: ... + def export_files( + self, + sources: tuple[str, ...], + destination: Path, + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: ... + + def import_files( + self, + sources: tuple[Path, ...], + destination: str, + recursive: bool, # noqa: FBT001 + force: bool, # noqa: FBT001 + mkpath: bool, # noqa: FBT001 + ) -> None: ... 
+ def test_storage_dirs(app, tmp_path): container = Container(app=app, name="test", instance="default") diff --git a/tests/env/dev/types/test_linux_container.py b/tests/env/dev/types/test_linux_container.py index a13fbf02..dbcc8175 100644 --- a/tests/env/dev/types/test_linux_container.py +++ b/tests/env/dev/types/test_linux_container.py @@ -7,6 +7,7 @@ import os import subprocess import sys +from contextlib import contextmanager from subprocess import CompletedProcess import msgspec @@ -14,6 +15,7 @@ from dda.config.constants import AppEnvVars from dda.env.dev.types.linux_container import LinuxContainer +from dda.env.models import EnvironmentState, EnvironmentStatus from dda.utils.fs import Path from dda.utils.git.constants import GitEnvVars @@ -33,12 +35,12 @@ def host_user_args(): return [] if sys.platform == "win32" else ["-e", f"HOST_UID={os.getuid()}", "-e", f"HOST_GID={os.getgid()}"] -def get_starship_mount(shared_dir: Path) -> list[str]: +def get_starship_mount(global_shared_dir: Path) -> list[str]: starship_config_file = Path.home() / ".config" / "starship.toml" if not starship_config_file.exists(): return [] - return ["-v", f"{shared_dir / 'shell' / 'starship.toml'}:/root/.shared/shell/starship.toml"] + return ["-v", f"{global_shared_dir / 'shell' / 'starship.toml'}:/root/.shared/shell/starship.toml"] def get_cache_volumes() -> list[str]: @@ -194,8 +196,9 @@ def test_default(self, dda, helpers, mocker, temp_dir, host_user_args): assert_ssh_config_written(write_server_config, "localhost") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() assert calls == [ ( @@ -229,9 +232,11 @@ def test_default(self, dda, helpers, mocker, temp_dir, 
host_user_args): GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, "-v", f"{repo_dir}:/root/repos/datadog-agent", @@ -274,8 +279,9 @@ def test_clone(self, dda, helpers, mocker, temp_dir, host_user_args): assert_ssh_config_written(write_server_config, "localhost") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() assert calls == [ ( @@ -309,9 +315,11 @@ def test_clone(self, dda, helpers, mocker, temp_dir, host_user_args): GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, "datadog/agent-dev-env-linux", ], @@ -372,8 +380,9 @@ def test_no_pull(self, dda, helpers, mocker, temp_dir, host_user_args): assert_ssh_config_written(write_server_config, "localhost") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() assert calls == [ ( @@ -402,10 +411,12 @@ def test_no_pull(self, dda, helpers, mocker, temp_dir, 
host_user_args): "-e", GitEnvVars.AUTHOR_NAME, "-e", - "GIT_AUTHOR_EMAIL", + GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, "-v", f"{repo_dir}:/root/repos/datadog-agent", @@ -456,8 +467,9 @@ def test_multiple(self, dda, helpers, mocker, temp_dir, host_user_args): assert_ssh_config_written(write_server_config, "localhost") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() assert calls == [ ( @@ -491,9 +503,11 @@ def test_multiple(self, dda, helpers, mocker, temp_dir, host_user_args): GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, "-v", f"{repo1_dir}:/root/repos/datadog-agent", @@ -539,8 +553,9 @@ def test_multiple_clones(self, dda, helpers, mocker, temp_dir, host_user_args): assert_ssh_config_written(write_server_config, "localhost") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() assert calls == [ ( @@ -574,9 +589,11 @@ 
def test_multiple_clones(self, dda, helpers, mocker, temp_dir, host_user_args): GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, "datadog/agent-dev-env-linux", ], @@ -631,8 +648,9 @@ def test_multiple_clones(self, dda, helpers, mocker, temp_dir, host_user_args): def test_extra_volume_specs(self, dda, helpers, mocker, temp_dir, host_user_args, volume_specs): mocker.patch("dda.utils.ssh.write_server_config") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() with ( @@ -692,9 +710,11 @@ def test_extra_volume_specs(self, dda, helpers, mocker, temp_dir, host_user_args GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, *[(x if x != "-v" else "--volume") for x in volume_specs], "datadog/agent-dev-env-linux", @@ -732,8 +752,9 @@ def test_extra_volume_specs(self, dda, helpers, mocker, temp_dir, host_user_args def test_extra_mounts(self, dda, helpers, mocker, temp_dir, host_user_args, mount_specs): mocker.patch("dda.utils.ssh.write_server_config") - shared_dir = temp_dir / "data" / "env" / "dev" / "linux-container" / ".shared" - starship_mount = get_starship_mount(shared_dir) + shared_dir = temp_dir / "data" / "env" / "dev" / 
"linux-container" / "default" / ".shared" + global_shared_dir = shared_dir.parent.parent / ".shared" + starship_mount = get_starship_mount(global_shared_dir) cache_volumes = get_cache_volumes() with ( @@ -794,9 +815,11 @@ def test_extra_mounts(self, dda, helpers, mocker, temp_dir, host_user_args, moun GitEnvVars.AUTHOR_NAME, "-e", GitEnvVars.AUTHOR_EMAIL, + "-v", + f"{shared_dir}:/.shared", *starship_mount, "-v", - f"{shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", + f"{global_shared_dir / 'shell' / 'zsh' / '.zsh_history'}:/root/.shared/shell/zsh/.zsh_history", *cache_volumes, *[(x if x != "-m" else "--mount") for x in mount_specs], "datadog/agent-dev-env-linux", @@ -1231,3 +1254,254 @@ def test_bytes(self, dda, helpers): """ ), ) + + +@pytest.fixture +def temp_shared_dir(temp_dir): + """Temporary shared directory simulating the intermediate location for docker cp.""" + shared = temp_dir / "share_test" + shared.ensure_dir() + return shared + + +@pytest.fixture +def export_destination(temp_dir): + """Destination directory for exported files.""" + dest = temp_dir / "final_destination" + dest.ensure_dir() + return dest + + +@pytest.fixture +def linux_container_with_shared_dir(app, mocker, temp_shared_dir): + """LinuxContainer instance configured with a mocked shared temp directory. + + Can be used for both export and import testing. 
+ """ + container = LinuxContainer(app=app, name="test", instance="default") + mocker.patch.object(container, "status", return_value=EnvironmentStatus(state=EnvironmentState.STARTED)) + + # Mock the temp_directory context manager to return our controlled temp directory + @contextmanager + def _temp_directory(dir=None): # noqa: ARG001, A002 + yield temp_shared_dir + + mocker.patch("dda.utils.fs.temp_directory", _temp_directory) + return container + + +class TestExportFiles: + """Test LinuxContainer.export_files() orchestration of docker cp and import_from_dir.""" + + @pytest.mark.parametrize( + ("sources", "recursive", "force", "mkpath", "expected_docker_cp_calls"), + [ + pytest.param( + ("file.txt",), + False, + False, + False, + [("dda-test-default:file.txt", "file.txt")], + id="single_file", + ), + pytest.param( + ("file1.txt", "file2.txt"), + False, + True, + False, + [ + ("dda-test-default:file1.txt", "file1.txt"), + ("dda-test-default:file2.txt", "file2.txt"), + ], + id="multiple_files_with_force", + ), + pytest.param( + ("folder",), + True, + False, + False, + [("dda-test-default:folder", "folder")], + id="single_directory_recursive", + ), + pytest.param( + ("file.txt", "folder", "file2.txt"), + True, + False, + True, + [ + ("dda-test-default:file.txt", "file.txt"), + ("dda-test-default:folder", "folder"), + ("dda-test-default:file2.txt", "file2.txt"), + ], + id="mixed_files_and_directories_with_mkpath", + ), + pytest.param( + ("dir1", "dir2"), + True, + True, + True, + [ + ("dda-test-default:dir1", "dir1"), + ("dda-test-default:dir2", "dir2"), + ], + id="multiple_directories_all_flags", + ), + ], + ) + def test_export_orchestration( + self, + mocker, + linux_container_with_shared_dir, + temp_shared_dir, + export_destination, + sources, + recursive, + force, + mkpath, + expected_docker_cp_calls, + ): + """Verify that export_files correctly orchestrates docker cp and import_from_dir calls.""" + + # Track docker cp calls + docker_cp_calls = [] + + def 
_mock_docker_cp(source: str, destination: str, cwd: Path | None = None) -> None: # noqa: ARG001 + docker_cp_calls.append((source, destination)) + + mocker.patch.object(linux_container_with_shared_dir, "_docker_cp", _mock_docker_cp) + + # Mock import_from_dir where it's used (in linux_container module) + mock_import_from_dir = mocker.patch("dda.env.dev.types.linux_container.import_from_dir") + + # Execute + linux_container_with_shared_dir.export_files( + sources=sources, + destination=export_destination, + recursive=recursive, + force=force, + mkpath=mkpath, + ) + + # Verify docker cp was called correctly for each source + assert docker_cp_calls == expected_docker_cp_calls + + # Verify import_from_dir was called once with correct parameters + mock_import_from_dir.assert_called_once_with( + temp_shared_dir, + export_destination, + recursive=recursive, + force=force, + mkpath=mkpath, + ) + + +class TestImportFiles: + """Test LinuxContainer.import_files() orchestration of file copying and dda command execution.""" + + @pytest.mark.parametrize( + ("sources", "destination", "recursive", "force", "mkpath"), + [ + pytest.param( + ("file_root.txt",), + "/root/dest", + False, + False, + False, + id="single_file", + ), + pytest.param( + ("file_root.txt", "file_root2.txt"), + "/root/dest", + False, + True, + False, + id="multiple_files_with_force", + ), + pytest.param( + ("folder1",), + "/root/dest", + True, + False, + False, + id="single_directory_recursive", + ), + pytest.param( + ("file_root.txt", "folder1", "file_root2.txt"), + "/root/dest", + True, + False, + True, + id="mixed_files_and_directories_with_mkpath", + ), + pytest.param( + ("folder1", "folder2"), + "/root/dest", + True, + True, + True, + id="multiple_directories_all_flags", + ), + ], + ) + def test_import_orchestration( + self, + mocker, + linux_container_with_shared_dir, + temp_shared_dir, + sources, + destination, + recursive, + force, + mkpath, + ): + """Verify that import_files correctly copies files to 
shared dir and runs dda command.""" + # Get source paths from fixtures + fixtures_dir = Path(__file__).parent.parent / "fixtures" / "fs_tests" + source_paths = [fixtures_dir / source for source in sources] + + # Mock subprocess.wait to capture the dda command + mock_subprocess_wait = mocker.patch.object(linux_container_with_shared_dir.app.subprocess, "wait") + + # Execute + linux_container_with_shared_dir.import_files( + sources=source_paths, + destination=destination, + recursive=recursive, + force=force, + mkpath=mkpath, + ) + + # Verify each source was copied to the shared temp directory + for source in sources: + shared_path = temp_shared_dir / source + fixture_path = fixtures_dir / source + + assert shared_path.exists(), f"Expected {source} to be copied to shared directory" + + if fixture_path.is_file(): + # For files, compare content + assert shared_path.read_text() == fixture_path.read_text() + else: + # For directories, verify all contents match recursively + for fixture_item in fixture_path.rglob("*"): + relative_path = fixture_item.relative_to(fixture_path) + shared_item = shared_path / relative_path + assert shared_item.exists(), f"Missing {relative_path} in copied {source}" + if fixture_item.is_file(): + assert shared_item.read_text() == fixture_item.read_text() + + # Verify the dda command was executed with correct arguments + mock_subprocess_wait.assert_called_once() + command = mock_subprocess_wait.call_args[0][0] + + # The command is wrapped in SSH - the last element contains the actual shell command + shell_command = command[-1].removeprefix("cd /root && ") + expected = " ".join([ + "dda env dev fs localimport", + f"/.shared/{temp_shared_dir.name}", + destination, + str(recursive), + str(force), + str(mkpath), + ]) + assert expected == shell_command