Skip to content

Commit 0b31a60

Browse files
committed
Parametrize pipeline tests by filename and add copies of the test/data/*.json pipelines as Python expressions using the new API
1 parent ea9e545 commit 0b31a60

File tree

7 files changed

+83
-40
lines changed

7 files changed

+83
-40
lines changed

test/data/bad.json

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
{
2+
"pipeline": [
3+
"nofile.las",
4+
{
5+
"type": "filters.sort",
6+
"dimension": "X"
7+
}
8+
]
9+
}

test/data/bad.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Reader("nofile.las") | Filter.sort(dimension="X")

test/data/chip.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Reader("test/data/autzen-utm.las") | Filter.chipper(capacity=25) | Writer("autzen-utm-chipped-25.las")

test/data/mesh.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Reader("test/data/1.2-with-color.las") | Filter.splitter(length=1000) | Filter.delaunay()

test/data/reproject.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
(
2+
Reader(filename="test/data/1.2-with-color.las", spatialreference="EPSG:2993")
3+
|
4+
Filter.python(function="filter", module="anything", source="""
5+
import numpy as np
6+
7+
8+
def filter(ins, outs):
9+
cls = ins["Classification"]
10+
keep_classes = [1]
11+
12+
# Use the first test for our base array.
13+
keep = np.equal(cls, keep_classes[0])
14+
15+
# For 1:n, test each predicate and join back
16+
# to our existing predicate array
17+
for k in range(1, len(keep_classes)):
18+
t = np.equal(cls, keep_classes[k])
19+
keep = keep + t
20+
21+
outs["Mask"] = keep
22+
return True
23+
""")
24+
|
25+
Writer("out2.las")
26+
)

test/data/sort.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Reader("test/data/1.2-with-color.las") | Filter.sort(dimension="X")

test/test_pipeline.py

Lines changed: 44 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -10,44 +10,41 @@
1010
DATADIRECTORY = os.path.join(os.path.dirname(__file__), "data")
1111

1212

13-
def get_pipeline(filename, factory=pdal.Pipeline):
13+
def get_pipeline(filename, validate=True):
1414
with open(os.path.join(DATADIRECTORY, filename), "r") as f:
15-
pipeline = factory(f.read())
16-
assert pipeline.validate()
15+
if filename.endswith(".json"):
16+
pipeline = pdal.Pipeline(f.read())
17+
elif filename.endswith(".py"):
18+
pipeline = eval(f.read(), vars(pdal))
19+
if validate:
20+
assert pipeline.validate()
1721
return pipeline
1822

1923

2024
class TestPipeline:
21-
def test_construction(self):
25+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
26+
def test_construction(self, filename):
2227
"""Can we construct a PDAL pipeline"""
23-
assert isinstance(get_pipeline("sort.json"), pdal.Pipeline)
28+
assert isinstance(get_pipeline(filename), pdal.Pipeline)
2429

25-
def test_execution(self):
30+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
31+
def test_execution(self, filename):
2632
"""Can we execute a PDAL pipeline"""
27-
r = get_pipeline("sort.json")
33+
r = get_pipeline(filename)
2834
r.execute()
2935
assert len(r.pipeline) > 200
3036

31-
def test_validate(self):
37+
@pytest.mark.parametrize("filename", ["bad.json", "bad.py"])
38+
def test_validate(self, filename):
3239
"""Do we complain with bad pipelines"""
33-
bad_json = """
34-
{
35-
"pipeline": [
36-
"nofile.las",
37-
{
38-
"type": "filters.sort",
39-
"dimension": "X"
40-
}
41-
]
42-
}
43-
"""
44-
r = pdal.Pipeline(bad_json)
40+
r = get_pipeline(filename, validate=False)
4541
with pytest.raises(RuntimeError):
4642
r.validate()
4743

48-
def test_array(self):
44+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
45+
def test_array(self, filename):
4946
"""Can we fetch PDAL data as a numpy array"""
50-
r = get_pipeline("sort.json")
47+
r = get_pipeline(filename)
5148
r.execute()
5249
arrays = r.arrays
5350
assert len(arrays) == 1
@@ -56,42 +53,47 @@ def test_array(self):
5653
assert a[0][0] == 635619.85
5754
assert a[1064][2] == 456.92
5855

59-
def test_metadata(self):
56+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
57+
def test_metadata(self, filename):
6058
"""Can we fetch PDAL metadata"""
61-
r = get_pipeline("sort.json")
59+
r = get_pipeline(filename)
6260
with pytest.raises(RuntimeError):
6361
r.metadata
6462
r.execute()
6563
j = json.loads(r.metadata)
6664
assert j["metadata"]["readers.las"][0]["count"] == 1065
6765

68-
def test_schema(self):
66+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
67+
def test_schema(self, filename):
6968
"""Fetching a schema works"""
70-
r = get_pipeline("sort.json")
69+
r = get_pipeline(filename)
7170
with pytest.raises(RuntimeError):
7271
r.schema
7372
r.execute()
7473
assert r.schema["schema"]["dimensions"][0]["name"] == "X"
7574

76-
def test_no_execute(self):
75+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
76+
def test_no_execute(self, filename):
7777
"""Does fetching arrays without executing throw an exception"""
78-
r = get_pipeline("sort.json")
78+
r = get_pipeline(filename)
7979
with pytest.raises(RuntimeError):
8080
r.arrays
8181

82-
def test_merged_arrays(self):
82+
@pytest.mark.parametrize("filename", ["chip.json", "chip.py"])
83+
def test_merged_arrays(self, filename):
8384
"""Can we fetch multiple point views from merged PDAL data"""
84-
r = get_pipeline("chip.json")
85+
r = get_pipeline(filename)
8586
r.execute()
8687
arrays = r.arrays
8788
assert len(arrays) == 43
8889

89-
# def test_logging(self):
90-
# """Can we fetch log output"""
91-
# r = get_pipeline('reproject.json')
92-
# count = r.execute()
93-
# assert count == 789
94-
# assert r.log.split()[0] == '(pypipeline')
90+
@pytest.mark.parametrize("filename", ["reproject.json", "reproject.py"])
91+
def test_logging(self, filename):
92+
"""Can we fetch log output"""
93+
r = get_pipeline(filename)
94+
count = r.execute()
95+
assert count == 789
96+
# assert r.log.split()[0] == "(pypipeline"
9597

9698

9799
class TestArrayLoad:
@@ -179,15 +181,17 @@ def test_fetch_dimensions(self):
179181

180182

181183
class TestMesh:
182-
def test_no_execute(self):
184+
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
185+
def test_no_execute(self, filename):
183186
"""Does fetching meshes without executing throw an exception"""
184-
r = get_pipeline("sort.json")
187+
r = get_pipeline(filename)
185188
with pytest.raises(RuntimeError):
186189
r.meshes
187190

188-
def test_mesh(self):
191+
@pytest.mark.parametrize("filename", ["mesh.json", "mesh.py"])
192+
def test_mesh(self, filename):
189193
"""Can we fetch PDAL face data as a numpy array"""
190-
r = get_pipeline("mesh.json")
194+
r = get_pipeline(filename)
191195
points = r.execute()
192196
assert points == 1065
193197
meshes = r.meshes

0 commit comments

Comments (0)