@@ -86,25 +86,67 @@ def test_array(self, filename):
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
def test_metadata(self, filename):
    """Can we fetch PDAL metadata"""
    # Metadata is refused before execution when validation is disabled...
    unvalidated = get_pipeline(filename, validate=False)
    with pytest.raises(RuntimeError):
        unvalidated.metadata

    # ...and likewise for a pipeline built with default validation.
    pipeline = get_pipeline(filename)
    with pytest.raises(RuntimeError):
        pipeline.metadata

    # After execute() the metadata is a JSON document we can parse.
    pipeline.execute()
    parsed = json.loads(pipeline.metadata)
    assert parsed["metadata"]["readers.las"][0]["count"] == 1065
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
def test_schema(self, filename):
    """Fetching a schema works"""
    # Accessing .schema on an unexecuted pipeline raises, whether or not
    # validation was requested at construction time.
    unvalidated = get_pipeline(filename, validate=False)
    with pytest.raises(RuntimeError):
        unvalidated.schema

    pipeline = get_pipeline(filename)
    with pytest.raises(RuntimeError):
        pipeline.schema

    # Once executed, the first dimension of the schema is X.
    pipeline.execute()
    assert pipeline.schema["schema"]["dimensions"][0]["name"] == "X"
@pytest.mark.parametrize("filename", ["sort.json", "sort.py"])
def test_pipeline(self, filename):
    """Can we fetch PDAL pipeline string"""
    # The serialized pipeline is unavailable before execution, both for an
    # unvalidated pipeline...
    unvalidated = get_pipeline(filename, validate=False)
    with pytest.raises(RuntimeError):
        unvalidated.pipeline

    # ...and for one built with default validation.
    pipeline = get_pipeline(filename)
    with pytest.raises(RuntimeError):
        pipeline.pipeline

    pipeline.execute()
    # After execution the pipeline round-trips as tagged JSON stages.
    expected = {
        "pipeline": [
            {
                "filename": "test/data/1.2-with-color.las",
                "tag": "readers_las1",
                "type": "readers.las",
            },
            {
                "dimension": "X",
                "inputs": ["readers_las1"],
                "tag": "filters_sort1",
                "type": "filters.sort",
            },
        ]
    }
    assert json.loads(pipeline.pipeline) == expected
105143 @pytest .mark .parametrize ("filename" , ["sort.json" , "sort.py" ])
106144 def test_no_execute (self , filename ):
107145 """Does fetching arrays without executing throw an exception"""
146+ r = get_pipeline (filename , validate = False )
147+ with pytest .raises (RuntimeError ):
148+ r .arrays
149+
108150 r = get_pipeline (filename )
109151 with pytest .raises (RuntimeError ):
110152 r .arrays
@@ -203,6 +245,10 @@ def test_infer_stage_type(self):
203245 @pytest .mark .parametrize ("filename" , ["reproject.json" , "reproject.py" ])
204246 def test_logging (self , filename ):
205247 """Can we fetch log output"""
248+ r = get_pipeline (filename , validate = False )
249+ assert r .loglevel == logging .ERROR
250+ assert r .log == ""
251+
206252 r = get_pipeline (filename )
207253 assert r .loglevel == logging .ERROR
208254 assert r .log == ""
@@ -307,6 +353,10 @@ class TestMesh:
307353 @pytest .mark .parametrize ("filename" , ["sort.json" , "sort.py" ])
308354 def test_no_execute (self , filename ):
309355 """Does fetching meshes without executing throw an exception"""
356+ r = get_pipeline (filename , validate = False )
357+ with pytest .raises (RuntimeError ):
358+ r .meshes
359+
310360 r = get_pipeline (filename )
311361 with pytest .raises (RuntimeError ):
312362 r .meshes
0 commit comments