 if TYPE_CHECKING:
     from collections.abc import Sequence
 
-    import duckdb
     import ibis
     import pandas as pd
     import polars as pl
     import pyarrow as pa
     from ibis.backends.duckdb import Backend as IbisDuckDBBackend
-    from pyspark.sql import DataFrame as PySparkDataFrame
     from typing_extensions import TypeAlias
 
-    from narwhals._spark_like.dataframe import SQLFrameDataFrame
+    from narwhals._native import NativeDask, NativeDuckDB, NativePySpark, NativeSQLFrame
     from narwhals._typing import EagerAllowed
-    from narwhals.typing import NativeDataFrame, NativeLazyFrame
+    from narwhals.typing import IntoDataFrame
     from tests.utils import Constructor, ConstructorEager, ConstructorLazy
 
     Data: TypeAlias = "dict[str, list[Any]]"
@@ -100,27 +98,27 @@ def pandas_pyarrow_constructor(obj: Data) -> pd.DataFrame:
     return pd.DataFrame(obj).convert_dtypes(dtype_backend="pyarrow")
 
 
-def modin_constructor(obj: Data) -> NativeDataFrame:  # pragma: no cover
+def modin_constructor(obj: Data) -> IntoDataFrame:  # pragma: no cover
     import modin.pandas as mpd
     import pandas as pd
 
     df = mpd.DataFrame(pd.DataFrame(obj))
-    return cast("NativeDataFrame", df)
+    return cast("IntoDataFrame", df)
 
 
-def modin_pyarrow_constructor(obj: Data) -> NativeDataFrame:  # pragma: no cover
+def modin_pyarrow_constructor(obj: Data) -> IntoDataFrame:  # pragma: no cover
     import modin.pandas as mpd
     import pandas as pd
 
     df = mpd.DataFrame(pd.DataFrame(obj)).convert_dtypes(dtype_backend="pyarrow")
-    return cast("NativeDataFrame", df)
+    return cast("IntoDataFrame", df)
 
 
-def cudf_constructor(obj: Data) -> NativeDataFrame:  # pragma: no cover
+def cudf_constructor(obj: Data) -> IntoDataFrame:  # pragma: no cover
     import cudf
 
     df = cudf.DataFrame(obj)
-    return cast("NativeDataFrame", df)
+    return cast("IntoDataFrame", df)
 
 
 def polars_eager_constructor(obj: Data) -> pl.DataFrame:
@@ -135,7 +133,7 @@ def polars_lazy_constructor(obj: Data) -> pl.LazyFrame:
     return pl.LazyFrame(obj)
 
 
-def duckdb_lazy_constructor(obj: Data) -> duckdb.DuckDBPyRelation:
+def duckdb_lazy_constructor(obj: Data) -> NativeDuckDB:
     import duckdb
     import polars as pl
 
@@ -145,16 +143,16 @@ def duckdb_lazy_constructor(obj: Data) -> duckdb.DuckDBPyRelation:
     return duckdb.table("_df")
 
 
-def dask_lazy_p1_constructor(obj: Data) -> NativeLazyFrame:  # pragma: no cover
+def dask_lazy_p1_constructor(obj: Data) -> NativeDask:  # pragma: no cover
     import dask.dataframe as dd
 
-    return cast("NativeLazyFrame", dd.from_dict(obj, npartitions=1))
+    return cast("NativeDask", dd.from_dict(obj, npartitions=1))
 
 
-def dask_lazy_p2_constructor(obj: Data) -> NativeLazyFrame:  # pragma: no cover
+def dask_lazy_p2_constructor(obj: Data) -> NativeDask:  # pragma: no cover
     import dask.dataframe as dd
 
-    return cast("NativeLazyFrame", dd.from_dict(obj, npartitions=2))
+    return cast("NativeDask", dd.from_dict(obj, npartitions=2))
 
 
 def pyarrow_table_constructor(obj: dict[str, Any]) -> pa.Table:
@@ -163,7 +161,7 @@ def pyarrow_table_constructor(obj: dict[str, Any]) -> pa.Table:
     return pa.table(obj)
 
 
-def pyspark_lazy_constructor() -> Callable[[Data], PySparkDataFrame]:  # pragma: no cover
+def pyspark_lazy_constructor() -> Callable[[Data], NativePySpark]:  # pragma: no cover
     pytest.importorskip("pyspark")
     import warnings
     from atexit import register
@@ -178,22 +176,22 @@ def pyspark_lazy_constructor() -> Callable[[Data], PySparkDataFrame]: # pragma:
 
     register(session.stop)
 
-    def _constructor(obj: Data) -> PySparkDataFrame:
+    def _constructor(obj: Data) -> NativePySpark:
         _obj = deepcopy(obj)
         index_col_name = generate_temporary_column_name(n_bytes=8, columns=list(_obj))
         _obj[index_col_name] = list(range(len(_obj[next(iter(_obj))])))
-
-        return (
+        result = (
             session.createDataFrame([*zip(*_obj.values())], schema=[*_obj.keys()])
             .repartition(2)
             .orderBy(index_col_name)
             .drop(index_col_name)
         )
+        return cast("NativePySpark", result)
 
     return _constructor
 
 
-def sqlframe_pyspark_lazy_constructor(obj: Data) -> SQLFrameDataFrame:  # pragma: no cover
+def sqlframe_pyspark_lazy_constructor(obj: Data) -> NativeSQLFrame:  # pragma: no cover
     session = sqlframe_session()
     return session.createDataFrame([*zip(*obj.values())], schema=[*obj.keys()])
 
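For reference, a minimal sketch (not part of the diff) of how one of the constructors above might be exercised in a test, assuming narwhals' public `narwhals.from_native` entry point and the `.columns` attribute of a DuckDB relation; the test name and data are illustrative only:

import narwhals as nw

def test_duckdb_select_roundtrip() -> None:  # hypothetical example, not from this commit
    data = {"a": [1, 2, 3], "b": [4.0, 5.0, 6.0]}
    native = duckdb_lazy_constructor(data)   # native DuckDB relation, per the constructor above
    frame = nw.from_native(native)           # wrap the native relation as a narwhals LazyFrame
    result = frame.select("a").to_native()   # project one column, then unwrap back to DuckDB
    assert result.columns == ["a"]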