-import sys
-import unittest
-import pdal
+import json
 import os
-import numpy as np
-from packaging.version import Version
-
-DATADIRECTORY = "./test/data"
+import sys

-bad_json = u"""
-{
-  "pipeline": [
-    "nofile.las",
-    {
-      "type": "filters.sort",
-      "dimension": "X"
-    }
-  ]
-}
-"""
+import numpy as np
+import pytest

+import pdal

+DATADIRECTORY = os.path.join(os.path.dirname(__file__), "data")

-class PDALTest(unittest.TestCase):

-    def fetch_json(self, filename):
-        import os
-        fn = DATADIRECTORY + os.path.sep + filename
-        output = ''
-        with open(fn, 'rb') as f:
-            output = f.read().decode('UTF-8')
-        return output
+def get_pipeline(filename, factory=pdal.Pipeline):
+    with open(os.path.join(DATADIRECTORY, filename), "r") as f:
+        pipeline = factory(f.read())
+    assert pipeline.validate()
+    return pipeline

-class TestPipeline(PDALTest):

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
+class TestPipeline:
     def test_construction(self):
         """Can we construct a PDAL pipeline"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
+        assert isinstance(get_pipeline("sort.json"), pdal.Pipeline)

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
     def test_execution(self):
         """Can we execute a PDAL pipeline"""
-        x = self.fetch_json('sort.json')
-        r = pdal.Pipeline(x)
-        r.validate()
+        r = get_pipeline("sort.json")
         r.execute()
-        self.assertGreater(len(r.pipeline), 200)
+        assert len(r.pipeline) > 200

     def test_validate(self):
         """Do we complain with bad pipelines"""
+        bad_json = """
+        {
+          "pipeline": [
+            "nofile.las",
+            {
+              "type": "filters.sort",
+              "dimension": "X"
+            }
+          ]
+        }
+        """
         r = pdal.Pipeline(bad_json)
-        with self.assertRaises(RuntimeError):
+        with pytest.raises(RuntimeError):
             r.validate()

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
     def test_array(self):
         """Can we fetch PDAL data as a numpy array"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
-        r.validate()
+        r = get_pipeline("sort.json")
         r.execute()
         arrays = r.arrays
-        self.assertEqual(len(arrays), 1)
+        assert len(arrays) == 1

         a = arrays[0]
-        self.assertAlmostEqual(a[0][0], 635619.85, 7)
-        self.assertAlmostEqual(a[1064][2], 456.92, 7)
+        assert a[0][0] == 635619.85
+        assert a[1064][2] == 456.92

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
     def test_metadata(self):
         """Can we fetch PDAL metadata"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
-        r.validate()
+        r = get_pipeline("sort.json")
+        with pytest.raises(RuntimeError):
+            r.metadata
         r.execute()
-        metadata = r.metadata
-        import json
-        j = json.loads(metadata)
-        self.assertEqual(j["metadata"]["readers.las"][0]["count"], 1065)
+        j = json.loads(r.metadata)
+        assert j["metadata"]["readers.las"][0]["count"] == 1065

+    def test_schema(self):
+        """Fetching a schema works"""
+        r = get_pipeline("sort.json")
+        with pytest.raises(RuntimeError):
+            r.schema
+        r.execute()
+        assert r.schema["schema"]["dimensions"][0]["name"] == "X"

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
     def test_no_execute(self):
         """Does fetching arrays without executing throw an exception"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
-        with self.assertRaises(RuntimeError):
+        r = get_pipeline("sort.json")
+        with pytest.raises(RuntimeError):
             r.arrays
-    #
-    # @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'reproject.json')),
-    #                      "missing test data")
-    # def test_logging(self):
-    #     """Can we fetch log output"""
-    #     json = self.fetch_json('reproject.json')
-    #     r = pdal.Pipeline(json)
-    #     r.loglevel = 8
-    #     r.validate()
-    #     count = r.execute()
-    #     self.assertEqual(count, 789)
-    #     self.assertEqual(r.log.split()[0], '(pypipeline')
-    #
-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
-    def test_schema(self):
-        """Fetching a schema works"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
-        r.validate()
-        r.execute()
-        self.assertEqual(r.schema['schema']['dimensions'][0]['name'], 'X')

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'chip.json')),
-                         "missing test data")
     def test_merged_arrays(self):
-        """Can we fetch multiple point views from merged PDAL data """
-        json = self.fetch_json('chip.json')
-        r = pdal.Pipeline(json)
-        r.validate()
+        """Can we fetch multiple point views from merged PDAL data"""
+        r = get_pipeline("chip.json")
         r.execute()
         arrays = r.arrays
-        self.assertEqual(len(arrays), 43)
+        assert len(arrays) == 43

+    # def test_logging(self):
+    #     """Can we fetch log output"""
+    #     r = get_pipeline('reproject.json')
+    #     count = r.execute()
+    #     assert count == 789
+    #     assert r.log.split()[0] == '(pypipeline')

-class TestArrayLoad(PDALTest):

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'perlin.npy')),
-                         "missing test data")
+class TestArrayLoad:
     def test_merged_arrays(self):
         """Can we load data from a list of arrays to PDAL"""
-        if Version(pdal.info.version) < Version('1.8'):
-            return True
-        data = np.load(os.path.join(DATADIRECTORY, 'test3d.npy'))
-
+        data = np.load(os.path.join(DATADIRECTORY, "test3d.npy"))
         arrays = [data, data, data]
-
-        json = self.fetch_json('chip.json')
-        chip = u"""{
-          "pipeline":[
-            {
-              "type":"filters.range",
-              "limits":"Intensity[100:300)"
-            }
-          ]
-        }"""
-
-        p = pdal.Pipeline(chip, arrays)
-        p.loglevel = 8
-        count = p.execute()
+        filter_intensity = """{
+          "pipeline":[
+            {
+              "type":"filters.range",
+              "limits":"Intensity[100:300)"
+            }
+          ]
+        }"""
+        p = pdal.Pipeline(filter_intensity, arrays)
+        p.execute()
         arrays = p.arrays
-        self.assertEqual(len(arrays), 3)
+        assert len(arrays) == 3

         for data in arrays:
-            self.assertEqual(len(data), 12)
-            self.assertEqual(data['Intensity'].sum(), 1926)
+            assert len(data) == 12
+            assert data["Intensity"].sum() == 1926

     def test_read_arrays(self):
         """Can we read and filter data from a list of arrays to PDAL"""
-        if Version(pdal.info.version) < Version('1.8'):
-            return True
-
         # just some dummy data
         x_vals = [1.0, 2.0, 3.0, 4.0, 5.0]
         y_vals = [6.0, 7.0, 8.0, 9.0, 10.0]
         z_vals = [1.5, 3.5, 5.5, 7.5, 9.5]
         test_data = np.array(
             [(x, y, z) for x, y, z in zip(x_vals, y_vals, z_vals)],
-            dtype=[('X', np.float), ('Y', np.float), ('Z', np.float)]
+            dtype=[("X", np.float), ("Y", np.float), ("Z", np.float)],
         )

         pipeline = """
@@ -184,19 +137,14 @@ def test_read_arrays(self):
           ]
         }
         """
-
-        p = pdal.Pipeline(pipeline, arrays=[test_data,])
-        p.loglevel = 8
+        p = pdal.Pipeline(pipeline, arrays=[test_data])
         count = p.execute()
         arrays = p.arrays
-        self.assertEqual(count, 2)
-        self.assertEqual(len(arrays), 1)
+        assert count == 2
+        assert len(arrays) == 1

     def test_reference_counting(self):
         """Can we read and filter data from a list of arrays to PDAL"""
-        if Version(pdal.info.version) < Version("1.8"):
-            return True
-
         # just some dummy data
         x_vals = [1.0, 2.0, 3.0, 4.0, 5.0]
         y_vals = [6.0, 7.0, 8.0, 9.0, 10.0]
@@ -216,51 +164,36 @@ def test_reference_counting(self):
           ]
         }
         """
-
         p = pdal.Pipeline(pipeline, arrays=[test_data])
-        p.loglevel = 8
         count = p.execute()
-        self.assertEqual(count, 2)
-        self.assertEqual(1, sys.getrefcount(p.arrays[0]), "Reference count should only be 1 in this case")
+        assert count == 2
+        refcount = sys.getrefcount(p.arrays[0])
+        assert refcount == 1


-class TestDimensions(PDALTest):
+class TestDimensions:
     def test_fetch_dimensions(self):
         """Ask PDAL for its valid dimensions list"""
         dims = pdal.dimensions
-        self.assertLess(len(dims), 120)
-        self.assertGreater(len(dims), 71)
+        assert 71 < len(dims) < 120

-class TestMesh(PDALTest):
-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
-                         "missing test data")
+
+class TestMesh:
     def test_no_execute(self):
         """Does fetching meshes without executing throw an exception"""
-        json = self.fetch_json('sort.json')
-        r = pdal.Pipeline(json)
-        with self.assertRaises(RuntimeError):
+        r = get_pipeline("sort.json")
+        with pytest.raises(RuntimeError):
             r.meshes

-    @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'mesh.json')),
-                         "missing test data")
     def test_mesh(self):
         """Can we fetch PDAL face data as a numpy array"""
-        json = self.fetch_json('mesh.json')
-        r = pdal.Pipeline(json)
-        r.validate()
+        r = get_pipeline("mesh.json")
         points = r.execute()
-        self.assertEqual(points, 1065)
+        assert points == 1065
         meshes = r.meshes
-        self.assertEqual(len(meshes), 24)
+        assert len(meshes) == 24

         m = meshes[0]
-        self.assertEqual(str(m.dtype), "[('A', '<u4'), ('B', '<u4'), ('C', '<u4')]")
-        self.assertEqual(len(m),134)
-        self.assertEqual(m[0][0], 29)
-
-def test_suite():
-    return unittest.TestSuite(
-        [TestPipeline()])
-
-if __name__ == '__main__':
-    unittest.main()
+        assert str(m.dtype) == "[('A', '<u4'), ('B', '<u4'), ('C', '<u4')]"
+        assert len(m) == 134
+        assert m[0][0] == 29