@@ -1582,6 +1582,29 @@ def test_empty_to_arrow_table(df):
15821582 assert set (pyarrow_table .column_names ) == {"a" , "b" , "c" }
15831583
15841584
def test_arrow_c_stream_to_table(monkeypatch):
    """Importing a DataFrame as an Arrow stream must not pre-collect it.

    ``pa.Table.from_batches`` consumes the DataFrame through the Arrow C
    stream protocol; each underlying record batch should arrive as its own
    chunk without the DataFrame ever being collected eagerly.
    """
    ctx = SessionContext()

    # Two separate record batches so chunking is observable after import.
    first = pa.record_batch([pa.array([1])], names=["a"])
    second = pa.record_batch([pa.array([2])], names=["a"])
    df = ctx.create_dataframe([[first], [second]])

    # Guard: the stream path must never fall back to DataFrame.collect().
    def fail_collect(self):  # pragma: no cover - failure path
        msg = "collect should not be called"
        raise AssertionError(msg)

    monkeypatch.setattr(DataFrame, "collect", fail_collect)

    result = pa.Table.from_batches(df)

    assert result.equals(pa.Table.from_batches([first, second]))
    assert result.schema == df.schema()
    # One chunk per source batch proves the stream preserved batch boundaries.
    assert result.column("a").num_chunks == 2
1606+
1607+
15851608def test_to_pylist (df ):
15861609 # Convert datafusion dataframe to Python list
15871610 pylist = df .to_pylist ()
@@ -2666,6 +2689,110 @@ def trigger_interrupt():
26662689 interrupt_thread .join (timeout = 1.0 )
26672690
26682691
def test_arrow_c_stream_interrupted():
    """__arrow_c_stream__ responds to ``KeyboardInterrupt`` signals.

    Similar to ``test_collect_interrupted`` this test issues a long running
    query, but consumes the results via ``__arrow_c_stream__``. It then raises
    ``KeyboardInterrupt`` in the main thread and verifies that the stream
    iteration stops promptly with the appropriate exception.
    """
    ctx = SessionContext()

    # Ten batches of 1000 rows each; the query below cross-joins them so the
    # stream is still producing results when the interrupt is delivered.
    batches = []
    for i in range(10):
        batch = pa.RecordBatch.from_arrays(
            [
                pa.array(list(range(i * 1000, (i + 1) * 1000))),
                pa.array([f"value_{j}" for j in range(i * 1000, (i + 1) * 1000)]),
            ],
            names=["a", "b"],
        )
        batches.append(batch)

    ctx.register_record_batches("t1", [batches])
    ctx.register_record_batches("t2", [batches])

    # Deliberately expensive query: the CROSS JOINs multiply the row count so
    # reading the stream cannot complete before the interrupt fires.
    df = ctx.sql(
        """
        WITH t1_expanded AS (
            SELECT
                a,
                b,
                CAST(a AS DOUBLE) / 1.5 AS c,
                CAST(a AS DOUBLE) * CAST(a AS DOUBLE) AS d
            FROM t1
            CROSS JOIN (SELECT 1 AS dummy FROM t1 LIMIT 5)
        ),
        t2_expanded AS (
            SELECT
                a,
                b,
                CAST(a AS DOUBLE) * 2.5 AS e,
                CAST(a AS DOUBLE) * CAST(a AS DOUBLE) * CAST(a AS DOUBLE) AS f
            FROM t2
            CROSS JOIN (SELECT 1 AS dummy FROM t2 LIMIT 5)
        )
        SELECT
            t1.a, t1.b, t1.c, t1.d,
            t2.a AS a2, t2.b AS b2, t2.e, t2.f
        FROM t1_expanded t1
        JOIN t2_expanded t2 ON t1.a % 100 = t2.a % 100
        WHERE t1.a > 100 AND t2.a > 100
        """
    )

    # Export the stream up front; actual consumption happens lazily inside
    # read_all() below.
    reader = pa.RecordBatchReader._import_from_c(df.__arrow_c_stream__())

    interrupted = False
    interrupt_error = None
    query_started = threading.Event()
    max_wait_time = 5.0  # seconds to wait for the main thread to signal readiness

    def trigger_interrupt():
        """Wait for the main thread to start consuming, then interrupt it."""
        start_time = time.time()
        while not query_started.is_set():
            time.sleep(0.1)
            if time.time() - start_time > max_wait_time:
                msg = f"Query did not start within {max_wait_time} seconds"
                raise RuntimeError(msg)

        thread_id = threading.main_thread().ident
        if thread_id is None:
            msg = "Cannot get main thread ID"
            raise RuntimeError(msg)

        # Schedule a KeyboardInterrupt in the main thread via the C API.
        exception = ctypes.py_object(KeyboardInterrupt)
        res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
            ctypes.c_long(thread_id), exception
        )
        if res != 1:
            # Per the CPython C API, a pending async exception is cancelled
            # by calling PyThreadState_SetAsyncExc with NULL (None through
            # ctypes) — previously this passed py_object(0), which does not
            # clear the pending exception.
            ctypes.pythonapi.PyThreadState_SetAsyncExc(
                ctypes.c_long(thread_id), None
            )
            msg = "Failed to raise KeyboardInterrupt in main thread"
            raise RuntimeError(msg)

    interrupt_thread = threading.Thread(target=trigger_interrupt)
    interrupt_thread.daemon = True
    interrupt_thread.start()

    try:
        query_started.set()
        # Consume the reader; this blocks inside the Arrow stream and should
        # be unwound by the injected KeyboardInterrupt.
        reader.read_all()
    except KeyboardInterrupt:
        interrupted = True
    except Exception as e:  # pragma: no cover - unexpected errors
        interrupt_error = e

    if not interrupted:
        pytest.fail(f"Stream was not interrupted; got error: {interrupt_error}")

    interrupt_thread.join(timeout=1.0)
2794+
2795+
26692796def test_show_select_where_no_rows (capsys ) -> None :
26702797 ctx = SessionContext ()
26712798 df = ctx .sql ("SELECT 1 WHERE 1=0" )
0 commit comments