@@ -11,14 +11,12 @@
 DATADIRECTORY = os.path.join(os.path.dirname(__file__), "data")
 
 
-def get_pipeline(filename, validate=True):
+def get_pipeline(filename):
     with open(os.path.join(DATADIRECTORY, filename), "r") as f:
         if filename.endswith(".json"):
             pipeline = pdal.Pipeline(f.read())
         elif filename.endswith(".py"):
             pipeline = eval(f.read(), vars(pdal))
-    if validate:
-        assert pipeline.validate()
     return pipeline
 
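With the validate flag gone, get_pipeline only parses the pipeline; every check is deferred to execute(). A minimal sketch of the resulting call pattern, assuming a fixture such as sort.json in the data directory:

    pipeline = get_pipeline("sort.json")   # parse only; no up-front validation
    count = pipeline.execute()             # runs the pipeline, returns the point count
    arrays = pipeline.arrays               # result arrays exist only after execute()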
@@ -67,9 +65,9 @@ def test_execution(self, filename):
     @pytest.mark.parametrize("filename", ["bad.json", "bad.py"])
     def test_validate(self, filename):
         """Do we complain with bad pipelines"""
-        r = get_pipeline(filename, validate=False)
+        r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
-            r.validate()
+            r.execute()
 
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_array(self, filename):
@@ -86,25 +84,17 @@ def test_array(self, filename):
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_metadata(self, filename):
         """Can we fetch PDAL metadata"""
-        r = get_pipeline(filename, validate=False)
-        with pytest.raises(RuntimeError):
-            r.metadata
-
         r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
             r.metadata
 
         r.execute()
         j = json.loads(r.metadata)
-        assert j["metadata"]["readers.las"][0]["count"] == 1065
+        assert j["metadata"]["readers.las"]["count"] == 1065
 
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_schema(self, filename):
         """Fetching a schema works"""
-        r = get_pipeline(filename, validate=False)
-        with pytest.raises(RuntimeError):
-            r.schema
-
         r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
             r.schema
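The metadata assertion also changes shape: the readers.las entry is now a single object rather than a one-element list. A short sketch of reading it after execution, reusing get_pipeline from above:

    r = get_pipeline("sort.json")
    r.execute()                    # r.metadata raises RuntimeError before this point
    j = json.loads(r.metadata)     # metadata is exposed as a JSON string
    assert j["metadata"]["readers.las"]["count"] == 1065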
@@ -115,10 +105,6 @@ def test_schema(self, filename):
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_pipeline(self, filename):
         """Can we fetch PDAL pipeline string"""
-        r = get_pipeline(filename, validate=False)
-        with pytest.raises(RuntimeError):
-            r.pipeline
-
         r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
             r.pipeline
@@ -143,10 +129,6 @@ def test_pipeline(self, filename):
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_no_execute(self, filename):
         """Does fetching arrays without executing throw an exception"""
-        r = get_pipeline(filename, validate=False)
-        with pytest.raises(RuntimeError):
-            r.arrays
-
         r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
             r.arrays
@@ -186,15 +168,15 @@ def test_pipe_stages(self):
 
         # pipe stages together
         pipeline = read | frange | fsplitter | fdelaunay
-        assert pipeline.validate()
+        pipeline.execute()
 
         # pipe a pipeline to a stage
        pipeline = read | (frange | fsplitter | fdelaunay)
-        assert pipeline.validate()
+        pipeline.execute()
 
         # pipe a pipeline to a pipeline
         pipeline = (read | frange) | (fsplitter | fdelaunay)
-        assert pipeline.validate()
+        pipeline.execute()
 
     def test_pipe_stage_errors(self):
         """Do we complain with piping invalid objects"""
@@ -211,19 +193,19 @@ def test_pipe_stage_errors(self):
 
         pipeline = r | w
         with pytest.raises(RuntimeError) as ctx:
-            pipeline.validate()
+            pipeline.execute()
         assert "Undefined stage 'f'" in str(ctx.value)
 
     def test_inputs(self):
         """Can we combine pipelines with inputs"""
         data = np.load(os.path.join(DATADIRECTORY, "test3d.npy"))
         f = pdal.Filter.splitter(length=1000)
         pipeline = f.pipeline(data)
-        assert pipeline.validate()
+        pipeline.execute()
 
         # a pipeline with inputs can be followed by stage/pipeline
-        assert (pipeline | pdal.Writer.null()).validate()
-        assert (pipeline | (f | pdal.Writer.null())).validate()
+        (pipeline | pdal.Writer.null()).execute()
+        (pipeline | (f | pdal.Writer.null())).execute()
 
         # a pipeline with inputs cannot follow another stage/pipeline
         with pytest.raises(ValueError):
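The same execute-only checking applies outside the fixtures. A sketch of piping a numpy array through stages, where the two-point structured array is invented for illustration and pdal is assumed to be installed with its numpy support:

    import numpy as np
    import pdal

    data = np.array(
        [(0.0, 0.0, 0.0), (1.0, 1.0, 1.0)],
        dtype=[("X", "<f8"), ("Y", "<f8"), ("Z", "<f8")],
    )
    pipeline = pdal.Filter.splitter(length=1000).pipeline(data)
    pipeline = pipeline | pdal.Writer.null()  # a pipeline with inputs can precede a stage
    pipeline.execute()                        # any stage error surfaces here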
@@ -245,10 +227,6 @@ def test_infer_stage_type(self):
     @pytest.mark.parametrize("filename", ["reproject.json", "reproject.py"])
     def test_logging(self, filename):
         """Can we fetch log output"""
-        r = get_pipeline(filename, validate=False)
-        assert r.loglevel == logging.ERROR
-        assert r.log == ""
-
         r = get_pipeline(filename)
         assert r.loglevel == logging.ERROR
         assert r.log == ""
@@ -353,10 +331,6 @@ class TestMesh:
     @pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
     def test_no_execute(self, filename):
         """Does fetching meshes without executing throw an exception"""
-        r = get_pipeline(filename, validate=False)
-        with pytest.raises(RuntimeError):
-            r.meshes
-
         r = get_pipeline(filename)
         with pytest.raises(RuntimeError):
             r.meshes
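Meshes follow the same execute-first rule as arrays, metadata, schema, and logs: the accessor raises RuntimeError until the pipeline has run. A sketch of the happy path:

    r = get_pipeline("sort.json")
    r.execute()
    meshes = r.meshes   # available only after execute(); populated by mesh-producing
                        # stages such as filters.delaunay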