Commit c3b0c1c

Author: Andrei Neagu (committed)
restructure tests with common utils
1 parent 6dfcd95 commit c3b0c1c

File tree: 5 files changed (+64 −104 lines)

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+
+from collections.abc import AsyncIterable
+from pathlib import Path
+
+import numpy
+import pytest
+from faker import Faker
+from helpers import print_tree
+from PIL import Image
+from servicelib.file_utils import remove_directory
+
+
+@pytest.fixture
+async def mixed_file_types(tmp_path: Path, faker: Faker) -> AsyncIterable[Path]:
+    base_dir = tmp_path / "mixed_types_dir"
+    base_dir.mkdir()
+
+    # mixed small text files and binary files
+    (base_dir / "empty").mkdir()
+    (base_dir / "d1").mkdir()
+    (base_dir / "d1" / "f1.txt").write_text(faker.text())
+    (base_dir / "d1" / "b2.bin").write_bytes(faker.json_bytes())
+    (base_dir / "d1" / "sd1").mkdir()
+    (base_dir / "d1" / "sd1" / "f1.txt").write_text(faker.text())
+    (base_dir / "d1" / "sd1" / "b2.bin").write_bytes(faker.json_bytes())
+    (base_dir / "images").mkdir()
+
+    # images cause issues with zipping, below content produced different
+    # hashes for zip files
+    for i in range(4):
+        image_dir = base_dir / f"images{i}"
+        image_dir.mkdir()
+        for n in range(50):
+            a = numpy.random.rand(1900, 1900, 3) * 255  # noqa: NPY002
+            im_out = Image.fromarray(a.astype("uint8")).convert("RGB")
+            image_path = image_dir / f"out{n}.jpg"
+            im_out.save(image_path)
+
+    print("mixed_types_dir ---")
+    print_tree(base_dir)
+
+    yield base_dir
+
+    await remove_directory(base_dir)
+    assert not base_dir.exists()
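
For orientation, a hedged sketch of how a test could consume this now-shared fixture; the test name and the asyncio-marker setup below are assumptions for illustration, not part of this commit.

# Hypothetical usage sketch only; not part of this commit.
import pytest


@pytest.mark.asyncio  # assumption: an asyncio test runner (e.g. pytest-asyncio) is configured
async def test_uses_mixed_file_types(mixed_file_types):  # hypothetical test name
    # the fixture yields the populated directory and removes it after the test
    assert mixed_file_types.is_dir()
    assert any(mixed_file_types.rglob("*.jpg"))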
Lines changed: 8 additions & 0 deletions

@@ -0,0 +1,8 @@
+from pathlib import Path
+
+
+def print_tree(path: Path, level=0):
+    tab = " " * level
+    print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}")
+    for p in path.glob("*"):
+        print_tree(p, level + 1)
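
As a quick illustration of the shared helper's output format; the directory contents below are made up for the example.

# Illustrative sketch only: print_tree prefixes directories with "+" and files
# with "-", indenting one space per nesting level; the root is printed with its
# full path, children with their names.
from pathlib import Path

from helpers import print_tree

example_root = Path("/tmp/example")  # hypothetical directory
(example_root / "d1").mkdir(parents=True, exist_ok=True)
(example_root / "d1" / "f1.txt").write_text("hello")
print_tree(example_root)
# + /tmp/example
#  + d1
#   - f1.txt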

packages/service-library/tests/test_archiving_utils_7zip.py renamed to packages/service-library/tests/archiving_utils/test_archiving__interface_7zip.py

Lines changed: 2 additions & 47 deletions

@@ -2,27 +2,15 @@
 # pylint: disable=unused-argument
 
 import json
-from collections.abc import AsyncIterable
 from pathlib import Path
 
-import numpy
 import pytest
-from faker import Faker
-from PIL import Image
 from pydantic import NonNegativeInt
 from servicelib.archiving_utils._interface_7zip import (
     ProgressParser,
     archive_dir,
     unarchive_dir,
 )
-from servicelib.file_utils import remove_directory
-
-
-def _print_tree(path: Path, level=0):
-    tab = " " * level
-    print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}")
-    for p in path.glob("*"):
-        _print_tree(p, level + 1)
 
 
 @pytest.fixture
@@ -37,41 +25,6 @@ def unpacked_archive(tmp_path: Path) -> Path:
     return path
 
 
-@pytest.fixture
-async def mixed_file_types(tmp_path: Path, faker: Faker) -> AsyncIterable[Path]:
-    base_dir = tmp_path / "mixed_types_dir"
-    base_dir.mkdir()
-
-    # mixed small text files and binary files
-    (base_dir / "empty").mkdir()
-    (base_dir / "d1").mkdir()
-    (base_dir / "d1" / "f1.txt").write_text(faker.text())
-    (base_dir / "d1" / "b2.bin").write_bytes(faker.json_bytes())
-    (base_dir / "d1" / "sd1").mkdir()
-    (base_dir / "d1" / "sd1" / "f1.txt").write_text(faker.text())
-    (base_dir / "d1" / "sd1" / "b2.bin").write_bytes(faker.json_bytes())
-    (base_dir / "images").mkdir()
-
-    # images cause issues with zipping, below content produced different
-    # hashes for zip files
-    for i in range(4):
-        image_dir = base_dir / f"images{i}"
-        image_dir.mkdir()
-        for n in range(50):
-            a = numpy.random.rand(1900, 1900, 3) * 255  # noqa: NPY002
-            im_out = Image.fromarray(a.astype("uint8")).convert("RGB")
-            image_path = image_dir / f"out{n}.jpg"
-            im_out.save(image_path)
-
-    print("mixed_types_dir ---")
-    _print_tree(base_dir)
-
-    yield base_dir
-
-    await remove_directory(base_dir)
-    assert not base_dir.exists()
-
-
 @pytest.fixture
 def compress_stdout(package_tests_dir: Path) -> list[str]:
     path = package_tests_dir / "data" / "archive_utils" / "compress_stdout.json"
@@ -100,8 +53,10 @@ async def progress_handler(byte_progress: NonNegativeInt) -> None:
     assert sum(detected_entries) == 434866026
 
 
+# TODO: unify these 2 tests; they differ only in the stdout fixture ("compress_stdout.json" vs "decompress_stdout.json") and the expected sizes
 async def test_decompress_progress_parser(decompress_stdout: list[str]):
     detected_entries: list[NonNegativeInt] = []
+    # TODO: also asserting an expected length of `detected_entries` would be ideal, to make sure all 100% entries are found
 
     async def progress_handler(byte_progress: NonNegativeInt) -> None:
         detected_entries.append(byte_progress)
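
One possible shape for the first TODO above, sketched with a hypothetical `_collect_progress` helper because the exact ProgressParser call sequence is not visible in this diff; the parametrization and fixture handling are assumptions, not part of this commit.

# Rough sketch only; not part of this commit.
import pytest


@pytest.mark.parametrize(
    "stdout_fixture_name, expected_total",
    [
        ("compress_stdout", 434866026),
        ("decompress_stdout", ...),  # placeholder: the decompress total is not shown in this diff
    ],
)
async def test_progress_parser(request, stdout_fixture_name, expected_total):
    stdout_lines: list[str] = request.getfixturevalue(stdout_fixture_name)
    # hypothetical helper that feeds the captured 7zip output through ProgressParser
    detected_entries = await _collect_progress(stdout_lines)
    assert sum(detected_entries) == expected_total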

packages/service-library/tests/test_archiving_utils.py renamed to packages/service-library/tests/archiving_utils/test_archiving_utils.py

Lines changed: 3 additions & 47 deletions

@@ -10,27 +10,18 @@
 import secrets
 import string
 import tempfile
-from collections.abc import AsyncIterable, Callable, Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
 from concurrent.futures import ProcessPoolExecutor
 from dataclasses import dataclass
 from pathlib import Path
 
-import numpy
 import pytest
 from faker import Faker
-from PIL import Image
+from helpers import print_tree
 from pydantic import ByteSize, TypeAdapter
 from pytest_benchmark.plugin import BenchmarkFixture
 from servicelib import archiving_utils
 from servicelib.archiving_utils import ArchiveError, archive_dir, unarchive_dir
-from servicelib.file_utils import remove_directory
-
-
-def _print_tree(path: Path, level=0):
-    tab = " " * level
-    print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}")
-    for p in path.glob("*"):
-        _print_tree(p, level + 1)
 
 
 @pytest.fixture
@@ -103,7 +94,7 @@ def exclude_patterns_validation_dir(tmp_path: Path, faker: Faker) -> Path:
     (base_dir / "d1" / "sd1" / "f2.txt").write_text(faker.text())
 
     print("exclude_patterns_validation_dir ---")
-    _print_tree(base_dir)
+    print_tree(base_dir)
     return base_dir
 
 
@@ -615,41 +606,6 @@ def _touch_all_files_in_path(path_to_archive: Path) -> None:
         path.touch()
 
 
-@pytest.fixture
-async def mixed_file_types(tmp_path: Path, faker: Faker) -> AsyncIterable[Path]:
-    base_dir = tmp_path / "mixed_types_dir"
-    base_dir.mkdir()
-
-    # mixed small text files and binary files
-    (base_dir / "empty").mkdir()
-    (base_dir / "d1").mkdir()
-    (base_dir / "d1" / "f1.txt").write_text(faker.text())
-    (base_dir / "d1" / "b2.bin").write_bytes(faker.json_bytes())
-    (base_dir / "d1" / "sd1").mkdir()
-    (base_dir / "d1" / "sd1" / "f1.txt").write_text(faker.text())
-    (base_dir / "d1" / "sd1" / "b2.bin").write_bytes(faker.json_bytes())
-    (base_dir / "images").mkdir()
-
-    # images cause issues with zipping, below content produced different
-    # hashes for zip files
-    for i in range(2):
-        image_dir = base_dir / f"images{i}"
-        image_dir.mkdir()
-        for n in range(50):
-            a = numpy.random.rand(900, 900, 3) * 255  # noqa: NPY002
-            im_out = Image.fromarray(a.astype("uint8")).convert("RGB")
-            image_path = image_dir / f"out{n}.jpg"
-            im_out.save(image_path)
-
-    print("mixed_types_dir ---")
-    _print_tree(base_dir)
-
-    yield base_dir
-
-    await remove_directory(base_dir)
-    assert not base_dir.exists()
-
-
 @pytest.mark.parametrize(
     "store_relative_path, compress",
     [

packages/service-library/tests/test_archiving_utils_extra.py renamed to packages/service-library/tests/archiving_utils/test_archiving_utils_extra.py

Lines changed: 4 additions & 10 deletions

@@ -6,6 +6,7 @@
 from pathlib import Path
 
 import pytest
+from helpers import print_tree
 from servicelib.archiving_utils import (
     PrunableFolder,
     archive_dir,
@@ -14,13 +15,6 @@
 )
 
 
-def _print_tree(path: Path, level=0):
-    tab = " " * level
-    print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}")
-    for p in path.glob("*"):
-        _print_tree(p, level + 1)
-
-
 @pytest.fixture
 def state_dir(tmp_path) -> Path:
     """Folder with some data, representing a given state"""
@@ -37,7 +31,7 @@ def state_dir(tmp_path) -> Path:
     (base_dir / "d1" / "d1_1" / "d1_1_1" / "f6").touch()
 
     print("state-dir ---")
-    _print_tree(base_dir)
+    print_tree(base_dir)
     # + /tmp/pytest-of-crespo/pytest-95/test_override_and_prune_from_a1/original
     #  + empty
     #  + d1
@@ -69,7 +63,7 @@ def new_state_dir(tmp_path) -> Path:
     # f6 deleted -> d1/d1_1/d2_2 remains empty and should be pruned
 
     print("new-state-dir ---")
-    _print_tree(base_dir)
+    print_tree(base_dir)
     # + /tmp/pytest-of-crespo/pytest-95/test_override_and_prune_from_a1/updated
     #  + d1
     #   + d1_1
@@ -125,7 +119,7 @@ def test_override_and_prune_folder(state_dir: Path, new_state_dir: Path):
     assert old_paths != got_paths
 
     print("after ----")
-    _print_tree(state_dir)
+    print_tree(state_dir)
 
 
 @pytest.mark.parametrize(
