Skip to content

Commit 162fc84

Browse files
committed
Test RuntimeError message
1 parent 83f02e3 commit 162fc84

File tree

2 files changed

+21
-14
lines changed

2 files changed

+21
-14
lines changed

pdal/libpdalpython.pyx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -180,7 +180,7 @@ cdef class Pipeline(PipelineResultsMixin):
180180
def arrays(self):
181181
cdef PipelineExecutor* executor = self._get_executor()
182182
if not executor.executed():
183-
raise RuntimeError("call execute() before fetching arrays")
183+
raise RuntimeError("Pipeline has not been executed!")
184184
output = []
185185
for view in executor.getManagerConst().views():
186186
output.append(<object>viewToNumpyArray(view))
@@ -191,7 +191,7 @@ cdef class Pipeline(PipelineResultsMixin):
191191
def meshes(self):
192192
cdef PipelineExecutor* executor = self._get_executor()
193193
if not executor.executed():
194-
raise RuntimeError("call execute() before fetching the mesh")
194+
raise RuntimeError("Pipeline has not been executed!")
195195
output = []
196196
for view in executor.getManagerConst().views():
197197
output.append(<object>meshToNumpyArray(deref(view).mesh()))

test/test_pipeline.py

Lines changed: 19 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,9 @@ def test_execution(self, filename):
6666
def test_validate(self, filename):
6767
"""Do we complain with bad pipelines"""
6868
r = get_pipeline(filename)
69-
with pytest.raises(RuntimeError):
69+
with pytest.raises(RuntimeError) as info:
7070
r.execute()
71+
assert "No such file or directory" in str(info.value)
7172

7273
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
7374
def test_array(self, filename):
@@ -85,8 +86,9 @@ def test_array(self, filename):
8586
def test_metadata(self, filename):
8687
"""Can we fetch PDAL metadata"""
8788
r = get_pipeline(filename)
88-
with pytest.raises(RuntimeError):
89+
with pytest.raises(RuntimeError) as info:
8990
r.metadata
91+
assert "Pipeline has not been executed" in str(info.value)
9092

9193
r.execute()
9294
j = json.loads(r.metadata)
@@ -96,8 +98,9 @@ def test_metadata(self, filename):
9698
def test_schema(self, filename):
9799
"""Fetching a schema works"""
98100
r = get_pipeline(filename)
99-
with pytest.raises(RuntimeError):
101+
with pytest.raises(RuntimeError) as info:
100102
r.schema
103+
assert "Pipeline has not been executed" in str(info.value)
101104

102105
r.execute()
103106
assert r.schema["schema"]["dimensions"][0]["name"] == "X"
@@ -106,8 +109,9 @@ def test_schema(self, filename):
106109
def test_pipeline(self, filename):
107110
"""Can we fetch PDAL pipeline string"""
108111
r = get_pipeline(filename)
109-
with pytest.raises(RuntimeError):
112+
with pytest.raises(RuntimeError) as info:
110113
r.pipeline
114+
assert "Pipeline has not been executed" in str(info.value)
111115

112116
r.execute()
113117
assert json.loads(r.pipeline) == {
@@ -131,8 +135,9 @@ def test_pipeline(self, filename):
131135
def test_no_execute(self, filename):
132136
"""Does fetching arrays without executing throw an exception"""
133137
r = get_pipeline(filename)
134-
with pytest.raises(RuntimeError):
138+
with pytest.raises(RuntimeError) as info:
135139
r.arrays
140+
assert "Pipeline has not been executed" in str(info.value)
136141

137142
@pytest.mark.parametrize("filename", ["chip.json", "chip.py"])
138143
def test_merged_arrays(self, filename):
@@ -193,9 +198,9 @@ def test_pipe_stage_errors(self):
193198
(r, f) | (f, w)
194199

195200
pipeline = r | w
196-
with pytest.raises(RuntimeError) as ctx:
201+
with pytest.raises(RuntimeError) as info:
197202
pipeline.execute()
198-
assert "Undefined stage 'f'" in str(ctx.value)
203+
assert "Undefined stage 'f'" in str(info.value)
199204

200205
def test_inputs(self):
201206
"""Can we combine pipelines with inputs"""
@@ -380,8 +385,9 @@ class TestMesh:
380385
def test_no_execute(self, filename):
381386
"""Does fetching meshes without executing throw an exception"""
382387
r = get_pipeline(filename)
383-
with pytest.raises(RuntimeError):
388+
with pytest.raises(RuntimeError) as info:
384389
r.meshes
390+
assert "Pipeline has not been executed" in str(info.value)
385391

386392
@pytest.mark.parametrize("filename", ["mesh.json", "mesh.py"])
387393
def test_mesh(self, filename):
@@ -413,7 +419,7 @@ def test_non_streamable(self, filename):
413419
r = get_pipeline(filename)
414420
with pytest.raises(RuntimeError) as info:
415421
r.iterator()
416-
assert str(info.value) == "Pipeline is not streamable"
422+
assert "Pipeline is not streamable" in str(info.value)
417423

418424
@pytest.mark.parametrize("filename", ["range.json", "range.py"])
419425
def test_array(self, filename):
@@ -483,8 +489,9 @@ def test_merged_arrays(self):
483489
non_streaming_array = np.concatenate(p.arrays)
484490
for chunk_size in range(5, 100, 5):
485491
streaming_arrays = list(p.iterator(chunk_size=chunk_size))
486-
np.testing.assert_array_equal(np.concatenate(streaming_arrays),
487-
non_streaming_array)
492+
np.testing.assert_array_equal(
493+
np.concatenate(streaming_arrays), non_streaming_array
494+
)
488495

489496
@pytest.mark.parametrize("filename", ["range.json", "range.py"])
490497
def test_premature_exit(self, filename):
@@ -496,7 +503,7 @@ def test_premature_exit(self, filename):
496503

497504
for _ in range(10):
498505
for array2 in r.iterator(chunk_size=100):
499-
np.testing.assert_array_equal(array2, array[:len(array2)])
506+
np.testing.assert_array_equal(array2, array[: len(array2)])
500507
break
501508

502509
@pytest.mark.parametrize("filename", ["range.json", "range.py"])

0 commit comments

Comments (0)