4 changes: 2 additions & 2 deletions .github/workflows/publish_pypi.yml
@@ -83,7 +83,7 @@ jobs:
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "delvewheel repair -w {dest_dir} {wheel}"
CIBW_ARCHS: ${{ matrix.cibw_archs }}
CIBW_BEFORE_TEST_LINUX: dnf -y install maven java
- CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+ CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

- name: Install Dependencies
@@ -143,7 +143,7 @@ jobs:
DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-listdeps {wheel} &&
MACOSX_DEPLOYMENT_TARGET=11.0 DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel} -e libc++ -e libunwind
CIBW_ARCHS: ${{ matrix.cibw_archs }}
- CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+ CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

- name: Install Dependencies
4 changes: 2 additions & 2 deletions .github/workflows/wheel_build.yml
@@ -82,7 +82,7 @@ jobs:
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "delvewheel repair -w {dest_dir} {wheel}"
CIBW_ARCHS: ${{ matrix.cibw_archs }}
CIBW_BEFORE_TEST_LINUX: dnf -y install maven java
- CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+ CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

- name: Upload Artifact
@@ -139,7 +139,7 @@ jobs:
DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-listdeps {wheel} &&
MACOSX_DEPLOYMENT_TARGET=11.0 DYLD_LIBRARY_PATH=$REPAIR_LIBRARY_PATH delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel} -e libc++ -e libunwind
CIBW_ARCHS: ${{ matrix.cibw_archs }}
- CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy
+ CIBW_TEST_REQUIRES: bfio>=2.4.0 tensorstore numpy pytest
CIBW_TEST_COMMAND: python -W default -m unittest discover -s {project}/tests -v

- name: Upload Artifact
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,3 +1,3 @@
[build-system]
- requires = ["setuptools>=61.0", "wheel", "looseversion", "versioneer", "cmake"]
+ requires = ["setuptools>=61.0", "wheel", "looseversion", "versioneer", "pytest", "cmake"]
build-backend = "setuptools.build_meta"
10 changes: 10 additions & 0 deletions pytest.ini
@@ -0,0 +1,10 @@
[pytest]
testpaths =
tests/python
python_files =
test_*.py
addopts =
--ignore=tests/python/test_read.py
--ignore=tests/python/test_pytest_bridge.py
markers =
integration: integration tests that touch disk/network
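
A minimal sketch of how this configuration is meant to be exercised (illustrative, not part of the PR): pytest collects tests/python/test_*.py, skips the two ignored modules, and the integration marker allows the slow, disk/network-dependent tests to be deselected.

    import pytest

    # Run the suite configured by pytest.ini, but deselect tests marked "integration"
    # (those clone the polus-test-data repo and write to disk).
    exit_code = pytest.main(["-m", "not integration"])
    raise SystemExit(exit_code)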
37 changes: 37 additions & 0 deletions tests/python/io_utils.py
@@ -0,0 +1,37 @@
from dataclasses import dataclass
from pathlib import Path
import subprocess



@dataclass(frozen=True)
class PolusTestDataRepo:
repo_url: str = "https://github.com/sameeul/polus-test-data.git"
repo_dir: Path = Path("polus-test-data")
default_branch: str = "main"


def ensure_repo_cloned(repo: PolusTestDataRepo, depth: int = 1) -> Path:
"""
Ensure the repo exists locally. If not, clone it.
Returns the local repo directory.
"""
if repo.repo_dir.exists():
return repo.repo_dir

repo.repo_dir.parent.mkdir(parents=True, exist_ok=True)
subprocess.run(
["git", "clone", "--depth", str(depth), repo.repo_url, str(repo.repo_dir)],
check=True,
)
return repo.repo_dir


def get_local_file_path(repo_dir: Path, rel_path: Path) -> Path:
"""
Resolve a path inside the repo and validate it exists.
"""
local_path = (repo_dir / rel_path).resolve()
if not local_path.exists():
raise FileNotFoundError(f"File not found: {local_path}")
return local_path
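
For orientation, a short usage sketch of these helpers (illustrative; the relative path is the one used by single_image.py below):

    from pathlib import Path

    repo = PolusTestDataRepo()                    # defaults to the sameeul/polus-test-data repo
    repo_dir = ensure_repo_cloned(repo, depth=1)  # shallow clone; no-op if already checked out
    image = get_local_file_path(repo_dir, Path("argolid") / "x0_y0_c1.ome.tiff")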
58 changes: 58 additions & 0 deletions tests/python/multi_images.py
@@ -0,0 +1,58 @@
from pathlib import Path
from typing import Mapping
from types import MappingProxyType

from argolid import PyramidGenerartor

from .io_utils import ensure_repo_cloned, PolusTestDataRepo

DEFAULT_DOWNSAMPLE_METHODS: Mapping[int, str] = MappingProxyType({1: "mean"})


def generate_pyramid_from_repo_path(
*,
repo: PolusTestDataRepo,
file_pattern: str,
image_name: str = "test_image",
output_dir: str | Path,
min_dim: int = 1024,
vis_type: str = "Viv",
downsample_methods: Mapping[int, str] = DEFAULT_DOWNSAMPLE_METHODS,
) -> None:
"""
Clone the repo if needed, resolve the image collection directory, then run Argolid pyramid generation.
"""
repo_dir = ensure_repo_cloned(repo)
input_dir = str(repo_dir / "argolid")

output_dir = Path(output_dir)
output_dir.parent.mkdir(parents=True, exist_ok=True)

pyr_gen = PyramidGenerartor()
pyr_gen.generate_from_image_collection(
input_dir,
file_pattern,
image_name,
str(output_dir),
min_dim,
vis_type,
dict(downsample_methods),
)


def main() -> None:
repo = PolusTestDataRepo()
file_pattern = "x{x:d}_y{y:d}_c{c:d}.ome.tiff"
output_dir = Path("output") / "2D_pyramid_assembled"

generate_pyramid_from_repo_path(
repo=repo,
file_pattern=file_pattern,
image_name="test_image",
output_dir=output_dir,
min_dim=1024,
vis_type="Viv",
downsample_methods={1: "mean"},
)


if __name__ == "__main__":
main()
57 changes: 57 additions & 0 deletions tests/python/single_image.py
@@ -0,0 +1,57 @@
from pathlib import Path
from typing import Mapping
from types import MappingProxyType

from argolid import PyramidGenerartor
from .io_utils import ensure_repo_cloned, get_local_file_path, PolusTestDataRepo


DEFAULT_DOWNSAMPLE_METHODS: Mapping[int, str] = MappingProxyType({1: "mean"})


def generate_pyramid_from_repo_file(
*,
repo: PolusTestDataRepo,
rel_image_path: Path,
output_dir: str | Path,
min_dim: int = 1024,
vis_type: str = "Viv",
downsample_methods: Mapping[int, str] = DEFAULT_DOWNSAMPLE_METHODS,
) -> None:
"""
Clone the repo if needed, locate the image file, then run Argolid pyramid generation.
"""
repo_dir = ensure_repo_cloned(repo)
input_file = str(get_local_file_path(repo_dir, rel_image_path))

output_dir = Path(output_dir)
output_dir.parent.mkdir(parents=True, exist_ok=True)

pyr_gen = PyramidGenerartor()
pyr_gen.generate_from_single_image(
input_file,
str(output_dir),
min_dim,
vis_type,
dict(downsample_methods),
)


def main() -> None:
repo = PolusTestDataRepo()

rel_image_path = Path("argolid") / "x0_y0_c1.ome.tiff"
output_dir = Path("output") / "one_image_ome_zarr"

generate_pyramid_from_repo_file(
repo=repo,
rel_image_path=rel_image_path,
output_dir=output_dir,
min_dim=1024,
vis_type="Viv",
downsample_methods={1: "mean"},
)


if __name__ == "__main__":
main()
34 changes: 34 additions & 0 deletions tests/python/test_multi_images.py
@@ -0,0 +1,34 @@
import shutil
from pathlib import Path

import pytest

from .io_utils import PolusTestDataRepo
from .multi_images import generate_pyramid_from_repo_path
from .zarr_assertions import assert_is_argolid_omexml_zarr_pyramid


@pytest.mark.integration
def test_stitched_image_collection_pyramid(tmp_path: Path) -> None:
if shutil.which("git") is None:
pytest.skip("git not available")

repo = PolusTestDataRepo(repo_dir=tmp_path / "polus-test-data")
out_dir = tmp_path / "out"

generate_pyramid_from_repo_path(
repo=repo,
file_pattern="x{x:d}_y{y:d}_c{c:d}.ome.tiff",
image_name="stitched_image",
output_dir=out_dir,
min_dim=1024,
vis_type="Viv",
downsample_methods={1: "mean"},
)

shapes = assert_is_argolid_omexml_zarr_pyramid(out_dir / "stitched_image.zarr", expect_levels=2)

# ensure at least one spatial dimension shrinks between level 0 and 1
y0, x0 = shapes[0][-2], shapes[0][-1]
y1, x1 = shapes[1][-2], shapes[1][-1]
assert (y1 < y0) or (x1 < x0), f"Expected level 1 to be downsampled vs level 0, got L0={shapes[0]} L1={shapes[1]}"
32 changes: 32 additions & 0 deletions tests/python/test_pytest_bridge.py
@@ -0,0 +1,32 @@
import os
import subprocess
import sys
import unittest
from pathlib import Path


if "PYTEST_CURRENT_TEST" in os.environ:
raise unittest.SkipTest("pytest bridge should not run under pytest")

class TestPytestSuite(unittest.TestCase):
"""Run pytest-based tests under unittest-driven CI."""
def test_pytest(self) -> None:
"The entry point of pytest execution"
tests_python = Path(__file__).resolve().parent # tests/python

env = os.environ.copy()
env["PYTEST_DISABLE_PLUGIN_AUTOLOAD"] = "1"

subprocess.run(
[
sys.executable,
"-m",
"pytest",
"-vv",
"-s",
str(tests_python),
],
check=True,
env=env,
timeout=900,
)
30 changes: 30 additions & 0 deletions tests/python/test_single_image.py
@@ -0,0 +1,30 @@
import shutil
from pathlib import Path

import pytest

from .io_utils import PolusTestDataRepo
from .single_image import generate_pyramid_from_repo_file
from .zarr_assertions import assert_is_argolid_omexml_zarr_pyramid


@pytest.mark.integration
def test_single_image_pyramid(tmp_path: Path) -> None:
if shutil.which("git") is None:
pytest.skip("git not available")

repo = PolusTestDataRepo(repo_dir=tmp_path / "polus-test-data")
out_dir = tmp_path / "out" / "single_image"

generate_pyramid_from_repo_file(
repo=repo,
rel_image_path=Path("argolid") / "x0_y0_c1.ome.tiff",
output_dir=out_dir,
)

shapes = assert_is_argolid_omexml_zarr_pyramid(out_dir, expect_levels=2)

# ensure at least one spatial dimension shrinks between level 0 and 1
y0, x0 = shapes[0][-2], shapes[0][-1]
y1, x1 = shapes[1][-2], shapes[1][-1]
assert (y1 < y0) or (x1 < x0), f"Expected level 1 to be downsampled vs level 0, got L0={shapes[0]} L1={shapes[1]}"
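
The shape checks above come from tests/python/zarr_assertions.py, which is not shown in this diff. As a rough, assumption-laden sketch of what such a helper might verify — assuming zarr v2 on-disk metadata and a Viv-style output with an OME-XML sidecar and numbered level directories, and not the PR's actual implementation — it could look like this (standard library only):

    import json
    from pathlib import Path


    def assert_is_argolid_omexml_zarr_pyramid(root: Path, expect_levels: int) -> list:
        """Hypothetical helper: verify the pyramid root exists, carries OME-XML
        metadata, and holds at least `expect_levels` zarr arrays; return the
        per-level shapes (largest level first)."""
        root = Path(root)
        assert root.is_dir(), f"Pyramid root not found: {root}"
        # Assumption: the Viv-style output ships an OME-XML sidecar somewhere under the root.
        assert any(root.rglob("METADATA.ome.xml")), "Missing OME-XML metadata"
        # Collect per-level shapes from zarr v2 ".zarray" metadata, sorted by level directory name.
        zarray_files = sorted(root.rglob(".zarray"), key=lambda p: p.parent.name)
        shapes = [tuple(json.loads(p.read_text())["shape"]) for p in zarray_files]
        assert len(shapes) >= expect_levels, (
            f"Expected at least {expect_levels} levels, found {len(shapes)}"
        )
        return shapes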