@@ -1596,7 +1596,7 @@ def test_iter_batches_dataframe(fail_collect):
     assert got.equals(exp)
 
 
-def test_arrow_c_stream_to_table(fail_collect):
+def test_arrow_c_stream_to_table_and_reader(fail_collect):
     ctx = SessionContext()
 
     # Create a DataFrame with two separate record batches
@@ -1613,6 +1613,12 @@ def test_arrow_c_stream_to_table(fail_collect):
     assert table.schema == df.schema()
     assert table.column("a").num_chunks == 2
 
+    reader = pa.RecordBatchReader._import_from_c_capsule(df.__arrow_c_stream__())
+    assert isinstance(reader, pa.RecordBatchReader)
+    reader_table = pa.Table.from_batches(reader)
+    expected = pa.Table.from_batches([batch1, batch2])
+    assert reader_table.equals(expected)
+
 
 def test_arrow_c_stream_order():
     ctx = SessionContext()
@@ -1631,14 +1637,6 @@ def test_arrow_c_stream_order():
     assert col.chunk(1)[0].as_py() == 2
 
 
-def test_arrow_c_stream_reader(df):
-    reader = pa.RecordBatchReader._import_from_c_capsule(df.__arrow_c_stream__())
-    assert isinstance(reader, pa.RecordBatchReader)
-    table = pa.Table.from_batches(reader)
-    expected = pa.Table.from_batches(df.collect())
-    assert table.equals(expected)
-
-
 def test_arrow_c_stream_schema_selection(fail_collect):
     ctx = SessionContext()