diff --git a/.github/workflows/publish_pypi.yml b/.github/workflows/publish_pypi.yml
index 1bd4bbe..f847971 100644
--- a/.github/workflows/publish_pypi.yml
+++ b/.github/workflows/publish_pypi.yml
@@ -83,7 +83,7 @@ jobs:
           CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "delvewheel repair -w {dest_dir} {wheel}"
           CIBW_ARCHS: ${{ matrix.cibw_archs }}
           CIBW_BEFORE_TEST_LINUX: dnf -y install maven java
-          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
           CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

       - name: Install Dependencies
@@ -143,7 +143,7 @@ jobs:
             DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-listdeps {wheel} &&
             MACOSX_DEPLOYMENT_TARGET=11.0 DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel} -e libc++ -e libunwind
           CIBW_ARCHS: ${{ matrix.cibw_archs }}
-          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
           CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

       - name: Install Dependencies
diff --git a/.github/workflows/wheel_build.yml b/.github/workflows/wheel_build.yml
index 7b741dc..0aace75 100644
--- a/.github/workflows/wheel_build.yml
+++ b/.github/workflows/wheel_build.yml
@@ -82,7 +82,7 @@ jobs:
           CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "delvewheel repair -w {dest_dir} {wheel}"
           CIBW_ARCHS: ${{ matrix.cibw_archs }}
           CIBW_BEFORE_TEST_LINUX: dnf -y install maven java
-          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
           CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

       - name: Upload Artifact
@@ -139,7 +139,7 @@ jobs:
             DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-listdeps {wheel} &&
             MACOSX_DEPLOYMENT_TARGET=11.0 DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel} -e libc++ -e libunwind
           CIBW_ARCHS: ${{ matrix.cibw_archs }}
-          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+          CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
           CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

       - name: Upload Artifact
diff --git a/pyproject.toml b/pyproject.toml
index 2e73259..d5f4365 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,3 @@
 [build-system]
-requires = ["setuptools>=61.0", "wheel", "looseversion", "versioneer", "cmake"]
+requires = ["setuptools>=61.0", "wheel", "looseversion", "versioneer", "pytest", "cmake"]
 build-backend = "setuptools.build_meta"
\ No newline at end of file
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..4bf139f
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,10 @@
+[pytest]
+testpaths =
+    tests/python
+python_files =
+    test_*.py
+addopts =
+    --ignore=tests/python/test_read.py
+    --ignore=tests/python/test_pytest_bridge.py
+markers =
+    integration: integration tests that touch disk/network
diff --git a/tests/python/io_utils.py b/tests/python/io_utils.py
new file mode 100644
index 0000000..23fbf8e
--- /dev/null
+++ b/tests/python/io_utils.py
@@ -0,0 +1,37 @@
+from dataclasses import dataclass
+from pathlib import Path
+import subprocess
+
+
+
+@dataclass(frozen=True)
+class PolusTestDataRepo:
+    repo_url: str = "https://github.com/sameeul/polus-test-data.git"
+    repo_dir: Path = Path("polus-test-data")
+    default_branch: str = "main"
+
+
+def ensure_repo_cloned(repo: PolusTestDataRepo, depth: int = 1) -> Path:
+    """
+    Ensure the repo exists locally. If not, clone it.
+    Returns the local repo directory.
+    """
+    if repo.repo_dir.exists():
+        return repo.repo_dir
+
+    repo.repo_dir.parent.mkdir(parents=True, exist_ok=True)
+    subprocess.run(
+        ["git", "clone", "--depth", str(depth), repo.repo_url, str(repo.repo_dir)],
+        check=True,
+    )
+    return repo.repo_dir
+
+
+def get_local_file_path(repo_dir: Path, rel_path: Path) -> Path:
+    """
+    Resolve a path inside the repo and validate it exists.
+    """
+    local_path = (repo_dir / rel_path).resolve()
+    if not local_path.exists():
+        raise FileNotFoundError(f"File not found: {local_path}")
+    return local_path
\ No newline at end of file
diff --git a/tests/python/multi_images.py b/tests/python/multi_images.py
new file mode 100644
index 0000000..1afafbb
--- /dev/null
+++ b/tests/python/multi_images.py
@@ -0,0 +1,58 @@
+from pathlib import Path
+from typing import Mapping
+from types import MappingProxyType
+
+from argolid import PyramidGenerartor
+from .io_utils import ensure_repo_cloned, PolusTestDataRepo
+DEFAULT_DOWNSAMPLE_METHODS: Mapping[int, str] = MappingProxyType({1: "mean"})
+
+
+def generate_pyramid_from_repo_path(
+    *,
+    repo: PolusTestDataRepo,
+    file_pattern: str,
+    image_name: str = "test_image",
+    output_dir: str | Path,
+    min_dim: int = 1024,
+    vis_type: str = "Viv",
+    downsample_methods: Mapping[int, str] = DEFAULT_DOWNSAMPLE_METHODS,
+) -> None:
+    """
+    Clone the repo if needed, locate the image collection, then run Argolid pyramid generation.
+    """
+    repo_dir = ensure_repo_cloned(repo)
+    input_dir = str(repo_dir / "argolid")
+
+    output_dir = Path(output_dir)
+    output_dir.parent.mkdir(parents=True, exist_ok=True)
+
+    pyr_gen = PyramidGenerartor()
+    pyr_gen.generate_from_image_collection(
+        input_dir,
+        file_pattern,
+        image_name,
+        str(output_dir),
+        min_dim,
+        vis_type,
+        dict(downsample_methods),
+    )
+
+
+def main() -> None:
+    repo = PolusTestDataRepo()
+    file_pattern = "x{x:d}_y{y:d}_c{c:d}.ome.tiff"
+    output_dir = Path("output") / "2D_pyramid_assembled"
+
+    generate_pyramid_from_repo_path(
+        repo=repo,
+        file_pattern=file_pattern,
+        image_name="test_image",
+        output_dir=output_dir,
+        min_dim=1024,
+        vis_type="Viv",
+        downsample_methods={1: "mean"},
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/python/single_image.py b/tests/python/single_image.py
new file mode 100644
index 0000000..028f944
--- /dev/null
+++ b/tests/python/single_image.py
@@ -0,0 +1,57 @@
+from pathlib import Path
+from typing import Mapping
+from types import MappingProxyType
+
+from argolid import PyramidGenerartor
+from .io_utils import ensure_repo_cloned, get_local_file_path, PolusTestDataRepo
+
+
+DEFAULT_DOWNSAMPLE_METHODS: Mapping[int, str] = MappingProxyType({1: "mean"})
+
+
+def generate_pyramid_from_repo_file(
+    *,
+    repo: PolusTestDataRepo,
+    rel_image_path: Path,
+    output_dir: str | Path,
+    min_dim: int = 1024,
+    vis_type: str = "Viv",
+    downsample_methods: Mapping[int, str] = DEFAULT_DOWNSAMPLE_METHODS,
+) -> None:
+    """
+    Clone the repo if needed, locate the image file, then run Argolid pyramid generation.
+ """ + repo_dir = ensure_repo_cloned(repo) + input_file = str(get_local_file_path(repo_dir, rel_image_path)) + + output_dir = Path(output_dir) + output_dir.parent.mkdir(parents=True, exist_ok=True) + + pyr_gen = PyramidGenerartor() + pyr_gen.generate_from_single_image( + input_file, + str(output_dir), + min_dim, + vis_type, + dict(downsample_methods), + ) + + +def main() -> None: + repo = PolusTestDataRepo() + + rel_image_path = Path("argolid") / "x0_y0_c1.ome.tiff" + output_dir = Path("output") / "one_image_ome_zarr" + + generate_pyramid_from_repo_file( + repo=repo, + rel_image_path=rel_image_path, + output_dir=output_dir, + min_dim=1024, + vis_type="Viv", + downsample_methods={1: "mean"}, + ) + + +if __name__ == "__main__": + main() diff --git a/tests/python/test_multi_images.py b/tests/python/test_multi_images.py new file mode 100644 index 0000000..a651a21 --- /dev/null +++ b/tests/python/test_multi_images.py @@ -0,0 +1,34 @@ +import shutil +from pathlib import Path + +import pytest + +from .io_utils import PolusTestDataRepo +from .multi_images import generate_pyramid_from_repo_path +from .zarr_assertions import assert_is_argolid_omexml_zarr_pyramid + + +@pytest.mark.integration +def test_stitched_image_collection_pyramid(tmp_path: Path) -> None: + if shutil.which("git") is None: + pytest.skip("git not available") + + repo = PolusTestDataRepo(repo_dir=tmp_path / "polus-test-data") + out_dir = tmp_path / "out" + + generate_pyramid_from_repo_path( + repo=repo, + file_pattern="x{x:d}_y{y:d}_c{c:d}.ome.tiff", + image_name="stitched_image", + output_dir=out_dir, + min_dim=1024, + vis_type="Viv", + downsample_methods={1: "mean"}, + ) + + shapes = assert_is_argolid_omexml_zarr_pyramid(out_dir / "stitched_image.zarr" , expect_levels=2) + + # ensure at least one spatial dimension shrinks between level 0 and 1 + y0, x0 = shapes[0][-2], shapes[0][-1] + y1, x1 = shapes[1][-2], shapes[1][-1] + assert (y1 < y0) or (x1 < x0), f"Expected level 1 to be downsampled vs level 0, got L0={shapes[0]} L1={shapes[1]}" diff --git a/tests/python/test_pytest_bridge.py b/tests/python/test_pytest_bridge.py new file mode 100644 index 0000000..ecc933d --- /dev/null +++ b/tests/python/test_pytest_bridge.py @@ -0,0 +1,32 @@ +import os +import subprocess +import sys +import unittest +from pathlib import Path + + +if "PYTEST_CURRENT_TEST" in os.environ: + raise unittest.SkipTest("pytest bridge should not run under pytest") + +class TestPytestSuite(unittest.TestCase): + """Run pytest-based tests under unittest-driven CI.""" + def test_pytest(self) -> None: + "The entry point of pytest execution" + tests_python = Path(__file__).resolve().parent # tests/python + + env = os.environ.copy() + env["PYTEST_DISABLE_PLUGIN_AUTOLOAD"] = "1" + + subprocess.run( + [ + sys.executable, + "-m", + "pytest", + "-vv", + "-s", + str(tests_python), + ], + check=True, + env=env, + timeout=900, + ) diff --git a/tests/python/test_single_image.py b/tests/python/test_single_image.py new file mode 100644 index 0000000..e1cc307 --- /dev/null +++ b/tests/python/test_single_image.py @@ -0,0 +1,30 @@ +import shutil +from pathlib import Path + +import pytest + +from .io_utils import PolusTestDataRepo +from .single_image import generate_pyramid_from_repo_file +from .zarr_assertions import assert_is_argolid_omexml_zarr_pyramid + + +@pytest.mark.integration +def test_single_image_pyramid(tmp_path: Path) -> None: + if shutil.which("git") is None: + pytest.skip("git not available") + + repo = PolusTestDataRepo(repo_dir=tmp_path / "polus-test-data") + 
+    out_dir = tmp_path / "out" / "single_image"
+
+    generate_pyramid_from_repo_file(
+        repo=repo,
+        rel_image_path=Path("argolid") / "x0_y0_c1.ome.tiff",
+        output_dir=out_dir,
+    )
+
+    shapes = assert_is_argolid_omexml_zarr_pyramid(out_dir, expect_levels=2)
+
+    # ensure at least one spatial dimension shrinks between level 0 and 1
+    y0, x0 = shapes[0][-2], shapes[0][-1]
+    y1, x1 = shapes[1][-2], shapes[1][-1]
+    assert (y1 < y0) or (x1 < x0), f"Expected level 1 to be downsampled vs level 0, got L0={shapes[0]} L1={shapes[1]}"
diff --git a/tests/python/zarr_assertions.py b/tests/python/zarr_assertions.py
new file mode 100644
index 0000000..db2130a
--- /dev/null
+++ b/tests/python/zarr_assertions.py
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+from pathlib import Path
+import zarr
+
+
+def find_argolid_root(out_dir: Path) -> Path:
+    """
+    Find the Argolid output root directory under out_dir.
+    The root is identified by:
+    - directory ending with .zarr or .ome.zarr
+    - containing METADATA.ome.xml
+    - containing data.zarr/
+    """
+    out_dir = out_dir.resolve()
+
+    if not out_dir.exists():
+        raise FileNotFoundError(f"Output directory does not exist: {out_dir}")
+
+    # If caller passed the root itself
+    if (
+        out_dir.is_dir()
+        and out_dir.suffix == ".zarr"
+        and (out_dir / "METADATA.ome.xml").exists()
+        and (out_dir / "data.zarr").is_dir()
+    ):
+        return out_dir
+
+    # Otherwise search one level down
+    candidates = []
+    for p in out_dir.iterdir():
+        if not p.is_dir():
+            continue
+        if p.suffix != ".zarr":
+            continue
+        if not (p / "METADATA.ome.xml").exists():
+            continue
+        if not (p / "data.zarr").is_dir():
+            continue
+        candidates.append(p)
+
+    if len(candidates) == 1:
+        return candidates[0]
+
+    if len(candidates) > 1:
+        return max(candidates, key=lambda p: p.stat().st_mtime)
+
+    raise FileNotFoundError(
+        f"No Argolid zarr output found under {out_dir}. "
+        f"Expected a *.zarr directory containing METADATA.ome.xml and data.zarr/"
+    )
+
+
+def open_argolid_data_group(argolid_root: Path) -> zarr.Group:
+    """
+    Open the data.zarr group inside Argolid output.
+    """
+    data_path = argolid_root / "data.zarr"
+    if not data_path.exists():
+        raise FileNotFoundError(f"Missing data.zarr at {data_path}")
+    return zarr.open_group(str(data_path), mode="r")
+
+
+def assert_is_argolid_omexml_zarr_pyramid(out_dir: Path, expect_levels: int | None = None) -> list[tuple[int, ...]]:
+    """
+    Validate Argolid-style output:
+        <image_name>.ome.zarr/METADATA.ome.xml
+        <image_name>.ome.zarr/data.zarr/0/<level>/...
+
+    Returns:
+        Shapes for each pyramid level found under data.zarr/0/
+    """
+    root = find_argolid_root(out_dir)
+
+    # METADATA.ome.xml exists and non-empty
+    ome_xml = root / "METADATA.ome.xml"
+    assert ome_xml.exists(), f"Missing {ome_xml}"
+    assert ome_xml.stat().st_size > 0, f"Empty metadata file: {ome_xml}"
+
+    data = open_argolid_data_group(root)
+
+    # Argolid stores pyramids under "0/"
+    assert "0" in data, f"Expected '0' in data.zarr. keys={list(data.keys())}"
+    series0 = data["0"]
+    assert isinstance(series0, zarr.Group)
+
+    # levels can be arrays or groups
+    level_names = sorted(
+        [k for k in series0.keys() if str(k).isdigit()],
+        key=lambda s: int(s),
+    )
+
+    assert level_names, (
+        f"No pyramid levels found under data.zarr/0. "
+        f"keys={list(series0.keys())}, groups={list(series0.group_keys())}, arrays={list(series0.array_keys())}"
+    )
+
+    level_strs = {str(k) for k in level_names}
+    assert "0" in level_strs, f"Missing level 0. Levels found: {level_names}"
+
+
+    if expect_levels is not None:
+        assert len(level_names) >= expect_levels, f"Expected at least {expect_levels} level(s), found {len(level_names)}: {level_names}"
+
+    shapes: list[tuple[int, ...]] = []
+    for lvl in level_names:
+        node = series0[str(lvl)]
+
+        # Level can be a direct array: data.zarr/0/<level>
+        if isinstance(node, zarr.Array):
+            shapes.append(node.shape)
+            continue
+
+        # Or a group containing one or more arrays: data.zarr/0/<level>/<array>
+        if isinstance(node, zarr.Group):
+            array_keys = list(node.array_keys())
+            assert array_keys, f"No arrays found in level group {lvl}. keys={list(node.keys())}"
+            arr = node[array_keys[0]]
+            shapes.append(arr.shape)
+            continue
+
+        raise AssertionError(f"Unexpected type at level {lvl}: {type(node)}")
+
+    assert shapes, "No shapes collected from pyramid levels"
+    y0, x0 = shapes[0][-2], shapes[0][-1]
+    assert y0 > 0 and x0 > 0, "Level 0 must have non-zero spatial dimensions"
+
+    # pyramid monotonicity: dims should not increase
+    for prev, nxt in zip(shapes, shapes[1:]):
+        assert len(prev) == len(nxt), f"Rank changed: {prev} -> {nxt}"
+        assert all(n <= p for p, n in zip(prev, nxt)), f"Not a pyramid: {prev} -> {nxt}"
+
+    return shapes
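
Reviewer note: a minimal usage sketch of the new helpers, not part of the diff. It assumes argolid is installed, git is on PATH, and that tests/python is importable as a package (the modules use relative imports); the "scratch" paths below are illustrative, not anything this change creates.

    from pathlib import Path

    from tests.python.io_utils import PolusTestDataRepo
    from tests.python.single_image import generate_pyramid_from_repo_file
    from tests.python.zarr_assertions import assert_is_argolid_omexml_zarr_pyramid

    # Clone sameeul/polus-test-data if absent, build a pyramid from one
    # OME-TIFF, then validate the Argolid layout (METADATA.ome.xml + data.zarr).
    repo = PolusTestDataRepo(repo_dir=Path("scratch") / "polus-test-data")  # illustrative path
    out_dir = Path("scratch") / "out"  # illustrative path

    generate_pyramid_from_repo_file(
        repo=repo,
        rel_image_path=Path("argolid") / "x0_y0_c1.ome.tiff",
        output_dir=out_dir,
    )

    # Returns one shape tuple per pyramid level; level 1 should be downsampled.
    shapes = assert_is_argolid_omexml_zarr_pyramid(out_dir, expect_levels=2)
    print(shapes)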