Skip to content

Commit 709c90f

Browse files
authored
Merge branch 'develop' into add-slide-data
2 parents b99f0e5 + adc18c9 commit 709c90f

33 files changed

+847
-759
lines changed

.github/workflows/python-package.yml

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@ jobs:
2020
python-version: ["3.10", "3.11", "3.12", "3.13"]
2121

2222
steps:
23-
- uses: actions/checkout@v3
23+
- uses: actions/checkout@v4
2424
- name: Set up Python ${{ matrix.python-version }}
25-
uses: actions/setup-python@v3
25+
uses: actions/setup-python@v4
2626
with:
2727
python-version: ${{ matrix.python-version }}
2828
- name: Install dependencies
@@ -33,7 +33,7 @@ jobs:
3333
python -m pip install ruff==0.13.3 pytest pytest-cov pytest-runner
3434
pip install -r requirements/requirements.txt
3535
- name: Cache tiatoolbox static assets
36-
uses: actions/cache@v3
36+
uses: actions/cache@v4
3737
with:
3838
key: tiatoolbox-home-static
3939
path: ~/.tiatoolbox
@@ -76,6 +76,17 @@ jobs:
7676
coverage-file: coverage.xml
7777
dsn: ${{ secrets.DEEPSOURCE_DSN }}
7878
fail-ci-on-error: false
79+
- name: List tiatoolbox contents
80+
run: ls -lahR ~/.tiatoolbox
81+
- name: Delete Hugging Face cache for large models
82+
run: |
83+
find ~/.tiatoolbox/models -type f -size +250M -exec bash -c '
84+
for model_path; do
85+
model_name=$(basename "$model_path")
86+
cache_dir="$HOME/.tiatoolbox/models/.cache/huggingface/download"
87+
rm -vf "$cache_dir/${model_name}.lock" "$cache_dir/${model_name}.metadata"
88+
done
89+
' bash {} +
7990
8091
release:
8192
runs-on: ubuntu-24.04
@@ -84,10 +95,10 @@ jobs:
8495
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/pre-release' || startsWith(github.ref, 'refs/tags/v')
8596

8697
steps:
87-
- uses: actions/checkout@v3
98+
- uses: actions/checkout@v4
8899

89100
- name: Set up Python 3.10
90-
uses: actions/setup-python@v3
101+
uses: actions/setup-python@v4
91102
with:
92103
python-version: '3.10'
93104
cache: 'pip'

requirements/requirements.conda.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ dependencies:
99
- openslide
1010
- pip>=20.0.2
1111
- pixman>=0.39.0
12-
- python>=3.9, <=3.12
12+
- python>=3.10, <=3.13
1313
- pip:
1414
- -r requirements.txt

requirements/requirements.dev.conda.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ dependencies:
99
- openslide
1010
- pip>=20.0.2
1111
- pixman>=0.39.0
12-
- python>=3.9, <=3.12
12+
- python>=3.10, <=3.13
1313
- pip:
1414
- -r requirements_dev.txt

requirements/requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ filelock>=3.9.0
99
flask>=2.2.2
1010
flask-cors>=4.0.0
1111
glymur>=0.12.7
12+
huggingface_hub>=0.33.3
1213
imagecodecs>=2022.9.26
1314
joblib>=1.1.1
1415
jupyterlab>=3.5.2

requirements/requirements.win64.conda.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ dependencies:
99
- openjpeg>=2.4.0
1010
- pip>=20.0.2
1111
- pixman>=0.39.0
12-
- python>=3.9, <=3.12
12+
- python>=3.10, <=3.13
1313
- pip:
1414
- -r requirements.txt

requirements/requirements.win64.dev.conda.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ dependencies:
99
- openjpeg>=2.4.0
1010
- pip>=20.0.2
1111
- pixman>=0.39.0
12-
- python>=3.9, <=3.12
12+
- python>=3.10, <=3.13
1313
- pip:
1414
- -r requirements_dev.txt

tests/conftest.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -659,3 +659,39 @@ def timed(fn: Callable, *args: object) -> (Callable, float):
659659
end = time.time()
660660
compile_time = end - start
661661
return result, compile_time
662+
663+
664+
_tmp_paths: list[Path] = []
665+
666+
667+
@pytest.fixture
668+
def track_tmp_path(tmp_path: Path) -> Path:
669+
"""This fixture tracks `tmp_path` for clean up.
670+
671+
Fixture that wraps pytest's built-in `tmp_path` and tracks each temporary path
672+
for later cleanup at the module level.
673+
674+
Returns:
675+
Path: The temporary directory path for the current test function.
676+
677+
"""
678+
_tmp_paths.append(tmp_path)
679+
return tmp_path
680+
681+
682+
@pytest.fixture(scope="module", autouse=True)
683+
def module_teardown() -> None:
684+
"""This module tears down temporary data directories.
685+
686+
Module-scoped fixture that automatically runs after all tests in a module.
687+
It cleans up all temporary paths tracked during the module's execution.
688+
689+
Yields:
690+
None: Allows pytest to run tests before executing the teardown logic.
691+
692+
"""
693+
yield
694+
for path in _tmp_paths:
695+
if path.exists():
696+
shutil.rmtree(path)
697+
print(f"Cleaned up: {path}")

tests/models/test_arch_mapde.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""Unit test package for SCCNN."""
22

33
from collections.abc import Callable
4+
from pathlib import Path
45

56
import numpy as np
67
import torch
@@ -14,15 +15,15 @@
1415
ON_GPU = toolbox_env.has_gpu()
1516

1617

17-
def _load_mapde(name: str) -> tuple[MapDe, str]:
    """Load a MapDe model initialised with the named pretrained weights.

    Returns:
        tuple[MapDe, str]: The ready-to-use model and the path of the
        downloaded weights file (so callers can clean it up afterwards).

    """
    device = select_device(on_gpu=ON_GPU)
    weights_path = fetch_pretrained_weights(name)
    state_dict = torch.load(weights_path, map_location=device)
    model = MapDe()
    model.load_state_dict(state_dict)
    model.to(device)
    return model, weights_path
2627

2728

2829
def test_functionality(remote_sample: Callable) -> None:
@@ -42,12 +43,13 @@ def test_functionality(remote_sample: Callable) -> None:
4243
coord_space="resolution",
4344
)
4445

45-
model = _load_mapde(name="mapde-conic")
46+
model, weights_path = _load_mapde(name="mapde-conic")
4647
patch = model.preproc(patch)
4748
batch = torch.from_numpy(patch)[None]
4849
output = model.infer_batch(model, batch, device=select_device(on_gpu=ON_GPU))
4950
output = model.postproc(output[0])
5051
assert np.all(output[0:2] == [[19, 171], [53, 89]])
52+
Path(weights_path).unlink()
5153

5254

5355
def test_multiclass_output() -> None:

tests/models/test_arch_micronet.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ def test_functionality(
4242
output = model.infer_batch(model, batch, device=map_location)
4343
output, _ = model.postproc(output[0])
4444
assert np.max(np.unique(output)) == 46
45+
Path(weights_path).unlink()
4546

4647

4748
def test_value_error() -> None:
@@ -54,7 +55,7 @@ def test_value_error() -> None:
5455
toolbox_env.running_on_ci() or not ON_GPU,
5556
reason="Local test on machine with GPU.",
5657
)
57-
def test_micronet_output(remote_sample: Callable, tmp_path: Path) -> None:
58+
def test_micronet_output(remote_sample: Callable, track_tmp_path: Path) -> None:
5859
"""Test the output of MicroNet."""
5960
svs_1_small = Path(remote_sample("svs-1-small"))
6061
micronet_output = Path(remote_sample("micronet-output"))
@@ -74,7 +75,7 @@ def test_micronet_output(remote_sample: Callable, tmp_path: Path) -> None:
7475
imgs=[
7576
svs_1_small,
7677
],
77-
save_dir=tmp_path / "output",
78+
save_dir=track_tmp_path / "output",
7879
)
7980

8081
output = np.load(output[0][1] + ".raw.0.npy")

tests/models/test_dataset.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,9 +86,9 @@ def test_kather_nonexisting_dir() -> None:
8686
_ = KatherPatchDataset(save_dir_path="non-existing-path")
8787

8888

89-
def test_kather_dataset(tmp_path: Path) -> None:
89+
def test_kather_dataset(track_tmp_path: Path) -> None:
9090
"""Test for kather patch dataset."""
91-
save_dir_path = tmp_path
91+
save_dir_path = track_tmp_path
9292

9393
# save to temporary location
9494
# remove previously generated data

0 commit comments

Comments
 (0)