@@ -40,6 +40,7 @@ use pyo3::pybacked::PyBackedStr;
 use pyo3::types::{PyCapsule, PyTuple, PyTupleMethods};
 use tokio::task::JoinHandle;
 
+use crate::catalog::PyTable;
 use crate::errors::{py_datafusion_err, PyDataFusionError};
 use crate::expr::sort_expr::to_sort_expressions;
 use crate::physical_plan::PyExecutionPlan;
@@ -64,6 +65,15 @@ impl PyTableProvider {
     pub fn get_provider(&self) -> Arc<dyn TableProvider> {
         self.provider.clone()
     }
+
+    /// Convert this TableProvider into a concrete Table wrapper, if possible.
+    pub fn as_table(&self) -> PyDataFusionResult<PyTable> {
+        // Clone the inner Arc<dyn TableProvider> so this provider remains usable,
+        // then hand the clone to the PyTable wrapper from crate::catalog.
+        let table: Arc<dyn TableProvider> = self.provider.clone();
+        // PyTable is the concrete, Python-exposed table type.
+        Ok(PyTable::new(table))
+    }
 }
 
 /// A PyDataFrame is a representation of a logical plan and an API to compose statements.
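
For context, here is a minimal sketch of what the PyTable wrapper imported from crate::catalog is assumed to look like; the real definition lives in that module and may carry additional pyo3 methods:

// Sketch only, assuming PyTable simply wraps the provider Arc.
use std::sync::Arc;
use datafusion::datasource::TableProvider;

pub struct PyTable {
    table: Arc<dyn TableProvider>,
}

impl PyTable {
    /// Wrap an existing provider; only the Arc is cloned, not the data.
    pub fn new(table: Arc<dyn TableProvider>) -> Self {
        Self { table }
    }

    /// Return a clone of the wrapped provider.
    pub fn table(&self) -> Arc<dyn TableProvider> {
        self.table.clone()
    }
}
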
@@ -106,12 +116,14 @@ impl PyDataFrame {
     }
 
     /// Convert this DataFrame into a view (i.e. a TableProvider) that can be registered.
-    fn into_view(&self) -> PyDataFusionResult<PyTableProvider> {
+    fn into_view(&self) -> PyDataFusionResult<PyTable> {
         // Call the underlying Rust DataFrame::into_view method.
         // Note that the Rust method consumes self; here we clone the inner Arc<DataFrame>
         // so that we don't invalidate this PyDataFrame.
         let table_provider = self.df.as_ref().clone().into_view();
-        Ok(PyTableProvider::new(table_provider))
+        let table_provider = PyTableProvider::new(table_provider);
+
+        Ok(table_provider.as_table()?)
     }
 
     fn __repr__(&self, py: Python) -> PyDataFusionResult<String> {
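
As a usage illustration (not part of the commit), the new conversion chain reads roughly like this when written out independently of the pyo3 method, assuming the imports already present in this file (Arc, TableProvider, PyTableProvider, PyTable, PyDataFusionResult):

// Illustrative sketch only: the same chain the new into_view() body performs.
fn view_as_table(df: datafusion::prelude::DataFrame) -> PyDataFusionResult<PyTable> {
    let provider: Arc<dyn TableProvider> = df.into_view(); // consumes df, returns the view provider
    let provider = PyTableProvider::new(provider);         // wrap in the pyo3 provider type
    provider.as_table()                                     // convert to the concrete PyTable
}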