22import logging
33import os
44import sys
5- from functools import partial
65
76import numpy as np
87import pytest
# Directory containing the pipeline fixture files, located next to this module.
DATADIRECTORY = os.path.join(os.path.dirname(__file__), "data")
1312
1413
def get_pipeline(filename):
    """Build a ``pdal.Pipeline`` from a fixture file in ``DATADIRECTORY``.

    ``.json`` files are passed verbatim to the ``Pipeline`` constructor;
    ``.py`` files are evaluated as Python expressions with the ``pdal``
    module namespace in scope.

    Raises:
        ValueError: if *filename* has an unsupported extension.  (The
            previous implementation fell through and raised an opaque
            ``UnboundLocalError`` on ``return pipeline``.)
    """
    with open(os.path.join(DATADIRECTORY, filename), "r") as f:
        if filename.endswith(".json"):
            pipeline = pdal.Pipeline(f.read())
        elif filename.endswith(".py"):
            # NOTE: eval() of trusted, repo-local fixture files only —
            # never route untrusted input through this helper.
            pipeline = eval(f.read(), vars(pdal))
        else:
            raise ValueError(f"unsupported pipeline fixture: {filename!r}")
    return pipeline
2621
2722
@@ -36,11 +31,6 @@ class TestPipeline:
3631 def test_construction (self , filename ):
3732 """Can we construct a PDAL pipeline"""
3833 assert isinstance (get_pipeline (filename ), pdal .Pipeline )
39- assert isinstance (get_pipeline (filename , chunk_size = 100 ), pdal .Pipeline )
40- assert isinstance (get_pipeline (filename , prefetch = 3 ), pdal .Pipeline )
41- assert isinstance (
42- get_pipeline (filename , chunk_size = 100 , prefetch = 3 ), pdal .Pipeline
43- )
4434
4535 # construct Pipeline from a sequence of stages
4636 r = pdal .Reader ("r" )
@@ -421,48 +411,51 @@ class TestPipelineIterator:
421411
422412 def test_array (self ):
423413 """Can we fetch PDAL data as numpy arrays"""
424- ri = get_pipeline ("range.json" , chunk_size = 100 )
425- arrays = list (ri )
426- assert len (arrays ) == 11
427- concat_array = np .concatenate (arrays )
428-
429414 r = get_pipeline ("range.json" )
430415 count = r .execute ()
431416 arrays = r .arrays
432417 assert len (arrays ) == 1
433418 array = arrays [0 ]
434419 assert count == len (array )
435420
436- np .testing .assert_array_equal (array , concat_array )
421+ for _ in range (10 ):
422+ arrays = list (r .iterator (chunk_size = 100 ))
423+ assert len (arrays ) == 11
424+ concat_array = np .concatenate (arrays )
425+ np .testing .assert_array_equal (array , concat_array )
426+
427+ def test_StopIteration (self ):
428+ """Is StopIteration raised when the iterator is exhausted"""
429+ r = get_pipeline ("range.json" )
430+ it = r .iterator (chunk_size = 100 )
431+ for array in it :
432+ assert isinstance (array , np .ndarray )
433+ with pytest .raises (StopIteration ):
434+ next (it )
435+ assert next (it , None ) is None
437436
438437 def test_metadata (self ):
439438 """Can we fetch PDAL metadata"""
440- ri = get_pipeline ("range.json" , chunk_size = 100 )
441- with pytest .raises (RuntimeError ):
442- ri .metadata
443- list (ri )
444-
445439 r = get_pipeline ("range.json" )
446- with pytest .raises (RuntimeError ):
447- r .metadata
448440 r .execute ()
449441
450- assert ri .metadata == r .metadata
442+ it = r .iterator (chunk_size = 100 )
443+ for _ in it :
444+ pass
445+
446+ assert r .metadata == it .metadata
451447
452448 @pytest .mark .xfail
453449 def test_schema (self ):
454450 """Fetching a schema works"""
455- ri = get_pipeline ("range.json" , chunk_size = 100 )
456- with pytest .raises (RuntimeError ):
457- ri .schema
458- list (ri )
459-
460451 r = get_pipeline ("range.json" )
461- with pytest .raises (RuntimeError ):
462- r .schema
463452 r .execute ()
464453
465- assert ri .schema == r .schema
454+ it = r .iterator (chunk_size = 100 )
455+ for _ in it :
456+ pass
457+
458+ assert r .schema == it .schema
466459
467460 def test_merged_arrays (self ):
468461 """Can we load data from a list of arrays to PDAL"""
@@ -480,8 +473,7 @@ def test_merged_arrays(self):
480473 p .execute ()
481474 non_streaming_array = np .concatenate (p .arrays )
482475 for chunk_size in range (5 , 100 , 5 ):
483- streaming_arrays = list (pdal .Pipeline (filter_intensity , arrays ,
484- chunk_size = chunk_size ))
476+ streaming_arrays = list (p .iterator (chunk_size = chunk_size ))
485477 np .testing .assert_array_equal (np .concatenate (streaming_arrays ),
486478 non_streaming_array )
487479
@@ -492,7 +484,17 @@ def test_premature_exit(self):
492484 assert len (r .arrays ) == 1
493485 array = r .arrays [0 ]
494486
495- ri = get_pipeline ("range.json" , chunk_size = 100 )
496- for array2 in ri :
497- np .testing .assert_array_equal (array2 , array [:len (array2 )])
498- break
487+ for _ in range (10 ):
488+ for array2 in r .iterator (chunk_size = 100 ):
489+ np .testing .assert_array_equal (array2 , array [:len (array2 )])
490+ break
491+
492+ def test_multiple_iterators (self ):
493+ """Can we create multiple independent iterators"""
494+ r = get_pipeline ("range.json" )
495+ it1 = r .iterator (chunk_size = 100 )
496+ it2 = r .iterator (chunk_size = 100 )
497+ for a1 , a2 in zip (it1 , it2 ):
498+ np .testing .assert_array_equal (a1 , a2 )
499+ assert next (it1 , None ) is None
500+ assert next (it2 , None ) is None