Merged
59 commits
2c36eea
feat(typing): Make `Implementation` less opaque
dangotbanned Aug 20, 2025
2350dfc
ci(typing): fix pyright coverage
dangotbanned Aug 20, 2025
fe80d52
ci: Handle descriptors in API reference
dangotbanned Aug 20, 2025
123dc2e
Merge branch 'main' into implementation-typing
dangotbanned Aug 20, 2025
54bfbe4
Merge remote-tracking branch 'upstream/main' into implementation-typing
dangotbanned Aug 21, 2025
cadcdf0
cov
dangotbanned Aug 21, 2025
5b2bc62
add typing tests, tweak overloads
dangotbanned Aug 21, 2025
14974bc
refactor(typing): Switch most overloads to `BaseFrame`
dangotbanned Aug 21, 2025
685409c
feat(typing): Get basic `LazyFrame.implementation` working
dangotbanned Aug 21, 2025
0f83e44
kinda support dask
dangotbanned Aug 21, 2025
49a10bd
ci: try include `dask` in typing?
dangotbanned Aug 21, 2025
fd2b93e
aaaaand `modin` as well
dangotbanned Aug 21, 2025
618ce8c
feat(typing): `duckdb` & `sqlframe` work!
dangotbanned Aug 21, 2025
0606a14
kinda support `ibis`
dangotbanned Aug 21, 2025
37aaa69
test(typing): Simplify Any/Into, also test lazy
dangotbanned Aug 21, 2025
141b687
test(typing): Add `DataFrame.lazy` suite
dangotbanned Aug 21, 2025
cabedd4
refactor: Prepare for `Series` support
dangotbanned Aug 22, 2025
71c5163
extend this overload abomination
dangotbanned Aug 22, 2025
2b7945b
refactor: Move `_ImplDescriptor` to `_utils`
dangotbanned Aug 22, 2025
c573cfd
feat(typing): Add (new) `Series.implementation`
dangotbanned Aug 22, 2025
410b5bd
oop
dangotbanned Aug 22, 2025
e07cbc5
test(typing): Add `Series` tests
dangotbanned Aug 22, 2025
c4bceed
test: Redo everything, check collect as well
dangotbanned Aug 22, 2025
c8dbe07
Merge branch 'main' into implementation-typing
dangotbanned Aug 23, 2025
eaa43c1
docs: Ensure `BaseFrame.implementation` shows in api ref
dangotbanned Aug 23, 2025
5ef8103
fix(typing): `ibis`, `dask` work!!!
dangotbanned Aug 23, 2025
f55cb3a
fix(typing): Unbreak `modin`
dangotbanned Aug 23, 2025
811290c
test(typing): Check `mpd.Series` too
dangotbanned Aug 23, 2025
012c2bf
typo
dangotbanned Aug 23, 2025
b0694d0
fix `mpd.Series`
dangotbanned Aug 23, 2025
2a75529
chore: Add overload for pyspark
dangotbanned Aug 23, 2025
7d42972
simplify, add notes
dangotbanned Aug 23, 2025
fd736c6
Merge branch 'main' into implementation-typing
dangotbanned Aug 23, 2025
5d2f54f
Merge branch 'main' into implementation-typing
dangotbanned Aug 24, 2025
05d4115
rename, add brief doc to `_Implementation`
dangotbanned Aug 24, 2025
87d4439
refactor: Rename `NarwhalsObj` -> `Narwhals`
dangotbanned Aug 24, 2025
bee6984
tighten up `Narwhals` w/ `Compliant`
dangotbanned Aug 24, 2025
1c68c68
docs(typing): Add `Narwhals` explainer
dangotbanned Aug 24, 2025
fcafec6
docs: Add crossref to `Implementation`
dangotbanned Aug 24, 2025
7157bbd
refactor: shrinking
dangotbanned Aug 24, 2025
08d900c
Merge branch 'main' into implementation-typing
dangotbanned Aug 24, 2025
b2aaf0d
Merge branch 'main' into implementation-typing
dangotbanned Aug 25, 2025
635b5a8
Merge branch 'main' into implementation-typing
dangotbanned Aug 25, 2025
5049a2a
docs: Explain typing test structure
dangotbanned Aug 26, 2025
4b78837
Update narwhals/_utils.py
dangotbanned Aug 26, 2025
29daf5e
Merge branch 'main' into implementation-typing
dangotbanned Aug 26, 2025
f6da9ce
Merge branch 'main' into implementation-typing
dangotbanned Aug 27, 2025
fe21d09
Merge remote-tracking branch 'upstream/main' into implementation-typing
dangotbanned Aug 27, 2025
a94a0f8
test(typing): Update for (#3032)
dangotbanned Aug 27, 2025
884d135
Merge branch 'main' into implementation-typing
dangotbanned Aug 28, 2025
4ad081c
Merge branch 'main' into implementation-typing
dangotbanned Aug 28, 2025
791ecae
ci: Exclude `OrderedDict` methods from `check-api-reference`
dangotbanned Aug 28, 2025
a3bd3ac
Merge branch 'main' into implementation-typing
dangotbanned Aug 28, 2025
043b9d1
Merge branch 'main' into implementation-typing
dangotbanned Aug 28, 2025
6b59ed9
Merge branch 'main' into implementation-typing
dangotbanned Aug 29, 2025
919b22f
Merge remote-tracking branch 'upstream/main' into implementation-typing
dangotbanned Aug 29, 2025
5e2838e
Merge branch 'main' into implementation-typing
dangotbanned Aug 30, 2025
6bc2c47
Merge branch 'main' into implementation-typing
dangotbanned Sep 2, 2025
21800fe
ci: Try adding `--group 'typing-ci'`
dangotbanned Sep 3, 2025
22 changes: 15 additions & 7 deletions narwhals/_namespace.py
@@ -10,6 +10,7 @@
Literal,
Protocol,
TypeVar,
cast,
overload,
)

@@ -37,8 +38,6 @@
import pandas as pd
import polars as pl
import pyarrow as pa
import pyspark.sql as pyspark_sql
from pyspark.sql.connect.dataframe import DataFrame as PySparkConnectDataFrame
from typing_extensions import Self, TypeAlias, TypeIs

from narwhals._arrow.namespace import ArrowNamespace
@@ -91,7 +90,7 @@ class _BasePandasLikeFrame(NativeDataFrame, _BasePandasLike, Protocol): ...
class _BasePandasLikeSeries(NativeSeries, _BasePandasLike, Protocol):
def where(self, cond: Any, other: Any = ..., **kwds: Any) -> Any: ...

class _NativeDask(Protocol):
class _NativeDask(NativeLazyFrame, Protocol):
dangotbanned (Member, Author) commented on Aug 24, 2025:

Follow-up PR

Loooooooooong overdue at this stage, but I'm gonna move most of this stuff and some others into a new narwhals._native module which has:

Protocols and aliases like:

# NOTE: Using `pyspark.sql.DataFrame` creates false positives in overloads when not installed
class _PySparkDataFrame(NativeLazyFrame, Protocol):
# Not on sqlframe classes
# Insane method name that no other framework would clobber
# https://github.com/apache/spark/blob/8530444e25b83971da4314c608aa7d763adeceb3/python/pyspark/sql/dataframe.py#L4875
def dropDuplicatesWithinWatermark(self, *arg: Any, **kwargs: Any) -> Any: ... # noqa: N802
_NativePolars: TypeAlias = "pl.DataFrame | pl.LazyFrame | pl.Series"
_NativeArrow: TypeAlias = "pa.Table | pa.ChunkedArray[Any]"
_NativeDuckDB: TypeAlias = "duckdb.DuckDBPyRelation"

Their corresponding new and re-aliased guards like:

def is_native_dask(obj: Any) -> TypeIs[_NativeDask]:
return is_dask_dataframe(obj)
is_native_duckdb: _Guard[_NativeDuckDB] = is_duckdb_relation
is_native_sqlframe: _Guard[_NativeSQLFrame] = is_sqlframe_dataframe

And the typing.Native* protocols as well:

# All dataframes supported by Narwhals have a
# `columns` property. Their similarities don't extend
# _that_ much further unfortunately...
class NativeFrame(Protocol):
@property
def columns(self) -> Any: ...
def join(self, *args: Any, **kwargs: Any) -> Any: ...
class NativeDataFrame(Sized, NativeFrame, Protocol): ...
class NativeLazyFrame(NativeFrame, Protocol):
def explain(self, *args: Any, **kwargs: Any) -> Any: ...
class NativeSeries(Sized, Iterable[Any], Protocol):
def filter(self, *args: Any, **kwargs: Any) -> Any: ...


Beyond just organizing things, it'll mean we can deduplicate the definitions that appear in 3x typing modules πŸ˜…

dangotbanned (Member, Author) commented:

The type used for session in (#3032 (comment)) would also make sense to be defined in this new module

_partition_type: type[pd.DataFrame]

class _CuDFDataFrame(_BasePandasLikeFrame, Protocol):
@@ -112,6 +111,13 @@ class _ModinDataFrame(_BasePandasLikeFrame, Protocol):
class _ModinSeries(_BasePandasLikeSeries, Protocol):
_pandas_class: type[pd.Series[Any]]

# NOTE: Using `pyspark.sql.DataFrame` creates false positives in overloads when not installed
class _PySparkDataFrame(NativeLazyFrame, Protocol):
# Not on sqlframe classes
# Insane method name that no other framework would clobber
# https://github.com/apache/spark/blob/8530444e25b83971da4314c608aa7d763adeceb3/python/pyspark/sql/dataframe.py#L4875
def dropDuplicatesWithinWatermark(self, *arg: Any, **kwargs: Any) -> Any: ... # noqa: N802

_NativePolars: TypeAlias = "pl.DataFrame | pl.LazyFrame | pl.Series"
_NativeArrow: TypeAlias = "pa.Table | pa.ChunkedArray[Any]"
_NativeDuckDB: TypeAlias = "duckdb.DuckDBPyRelation"
@@ -124,8 +130,8 @@ class _ModinSeries(_BasePandasLikeSeries, Protocol):
)
_NativePandasLike: TypeAlias = "_NativePandasLikeDataFrame |_NativePandasLikeSeries"
_NativeSQLFrame: TypeAlias = "SQLFrameDataFrame"
_NativePySpark: TypeAlias = "pyspark_sql.DataFrame"
_NativePySparkConnect: TypeAlias = "PySparkConnectDataFrame"
_NativePySpark: TypeAlias = _PySparkDataFrame
_NativePySparkConnect: TypeAlias = _PySparkDataFrame
_NativeSparkLike: TypeAlias = (
"_NativeSQLFrame | _NativePySpark | _NativePySparkConnect"
)
@@ -371,8 +377,10 @@ def is_native_dask(obj: Any) -> TypeIs[_NativeDask]:

is_native_duckdb: _Guard[_NativeDuckDB] = is_duckdb_relation
is_native_sqlframe: _Guard[_NativeSQLFrame] = is_sqlframe_dataframe
is_native_pyspark: _Guard[_NativePySpark] = is_pyspark_dataframe
is_native_pyspark_connect: _Guard[_NativePySparkConnect] = is_pyspark_connect_dataframe
is_native_pyspark = cast("_Guard[_NativePySpark]", is_pyspark_dataframe)
is_native_pyspark_connect = cast(
"_Guard[_NativePySparkConnect]", is_pyspark_connect_dataframe
)


def is_native_pandas(obj: Any) -> TypeIs[_NativePandas]:
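The `cast` calls above deserve a note: `_Guard`'s definition is not shown in this hunk, but it is presumably a callable alias over `TypeIs`, and `TypeIs` is invariant, so a predicate that narrows to the concrete `pyspark.sql.DataFrame` cannot be assigned directly once the alias points at the structural `_PySparkDataFrame` protocol. A minimal, self-contained sketch of that pattern, with hypothetical names (`Duck`, `QuacksLike`, `is_duck`) standing in for the narwhals ones:

```python
from typing import Any, Callable, Protocol, TypeVar, cast

from typing_extensions import TypeAlias, TypeIs

T = TypeVar("T")

# A guard is a predicate whose True branch narrows its argument to `T`.
# (Hypothetical definition; narwhals' real `_Guard` lives elsewhere.)
_Guard: TypeAlias = Callable[[Any], TypeIs[T]]


class Duck:  # stands in for the concrete pyspark.sql.DataFrame
    def quack(self) -> str:
        return "quack"


class QuacksLike(Protocol):  # stands in for the structural _PySparkDataFrame
    def quack(self) -> str: ...


def is_duck(obj: Any) -> TypeIs[Duck]:
    return isinstance(obj, Duck)


# Re-aliasing works when the narrowed types match exactly, as with
# `is_native_duckdb` / `is_native_sqlframe` above:
is_duck_guard: _Guard[Duck] = is_duck

# `TypeIs` is invariant, so reusing the concrete predicate for the Protocol
# alias needs a cast; presumably the same reason `is_native_pyspark` does.
is_quacker_guard: _Guard[QuacksLike] = cast("_Guard[QuacksLike]", is_duck)
```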
133 changes: 93 additions & 40 deletions narwhals/dataframe.py
@@ -71,8 +71,34 @@

from narwhals._compliant import CompliantDataFrame, CompliantLazyFrame
from narwhals._compliant.typing import CompliantExprAny, EagerNamespaceAny
from narwhals._namespace import (
_CuDFDataFrame,
_ModinDataFrame,
_NativeDask,
_NativeDuckDB,
_NativePandasLikeDataFrame,
_NativeSQLFrame,
)
from narwhals._translate import IntoArrowTable
from narwhals._typing import Dask, DuckDB, EagerAllowed, Ibis, IntoBackend, Polars
from narwhals._typing import (
Dask,
DuckDB,
EagerAllowed,
Ibis,
IntoBackend,
Polars,
_ArrowImpl,
_CudfImpl,
_DaskImpl,
_DuckDBImpl,
_EagerAllowedImpl,
_LazyAllowedImpl,
_ModinImpl,
_PandasImpl,
_PandasLikeImpl,
_PolarsImpl,
_SQLFrameImpl,
)
from narwhals.group_by import GroupBy, LazyGroupBy
from narwhals.typing import (
AsofJoinStrategy,
@@ -104,10 +130,76 @@
MultiIndexSelector: TypeAlias = "_MultiIndexSelector[Series[Any]]"


class _ImplDescriptor:
def __set_name__(self, owner: type[Any], name: str) -> None:
self.__name__: str = name

@overload
def __get__(
self, instance: DataFrame[pl.DataFrame] | LazyFrame[pl.LazyFrame], owner: Any
) -> _PolarsImpl: ...
@overload
def __get__(self, instance: BaseFrame[pd.DataFrame], owner: Any) -> _PandasImpl: ...
@overload
def __get__(self, instance: BaseFrame[_ModinDataFrame], owner: Any) -> _ModinImpl: ...

@overload # oof, looks like these two need their names aligned πŸ˜…
def __get__(self, instance: BaseFrame[_CuDFDataFrame], owner: Any) -> _CudfImpl: ...
@overload
def __get__(
self, instance: BaseFrame[_NativePandasLikeDataFrame], owner: Any
) -> _PandasLikeImpl: ...
@overload
def __get__(self, instance: BaseFrame[pa.Table], owner: Any) -> _ArrowImpl: ...
@overload
def __get__(
self, instance: BaseFrame[pl.DataFrame | pd.DataFrame | pa.Table], owner: Any
) -> _PolarsImpl | _PandasImpl | _ArrowImpl: ...
@overload
def __get__(self, instance: LazyFrame[_NativeDuckDB], owner: Any) -> _DuckDBImpl: ...
@overload
def __get__(
self, instance: LazyFrame[_NativeSQLFrame], owner: Any
) -> _SQLFrameImpl: ...
@overload
def __get__(self, instance: LazyFrame[_NativeDask], owner: Any) -> _DaskImpl: ...
@overload
def __get__(self, instance: None, owner: Any) -> Self: ...
@overload
def __get__(self, instance: DataFrame[Any], owner: Any) -> _EagerAllowedImpl: ...
@overload
def __get__(self, instance: LazyFrame[Any], owner: Any) -> _LazyAllowedImpl: ...
def __get__(self, instance: Any | None, owner: Any) -> Any:
if instance is None: # pragma: no cover
return self
return instance._compliant_frame._implementation


class BaseFrame(Generic[_FrameT]):
_compliant_frame: Any
_level: Literal["full", "lazy", "interchange"]

implementation: _ImplDescriptor = _ImplDescriptor()
"""Return implementation of native frame.

This can be useful when you need to use special-casing for features outside of
Narwhals' scope - for example, when dealing with pandas' Period Dtype.
Comment on lines +112 to +113
Member commented:

True (and I know this was the description that was already here) - but this is not the only case in which .implementation is useful, e.g. nw.new_series or DataFrame.from_dict require a backend to be provided. It would be nice to display one of these. In their docs the case in the example is static, and does not depend on some other input

dangotbanned (Member, Author) replied:

Yeah I agree tbf, I also thought the first line could do with some tweaking

"""Return [`narwhals.Implementation`][] of native frame.

If you suggest something, I'm 95% sure I'll accept it πŸ˜…

dangotbanned (Member, Author) commented:

Could we look at improving these docs in a follow-up?

Member replied:

Yes sure! Sorry I didn't mean it as a blocker


Examples:
>>> import narwhals as nw
>>> import pandas as pd
>>> df_native = pd.DataFrame({"a": [1, 2, 3]})
>>> df = nw.from_native(df_native)
>>> df.implementation
<Implementation.PANDAS: 'pandas'>
>>> df.implementation.is_pandas()
True
>>> df.implementation.is_pandas_like()
True
>>> df.implementation.is_polars()
False
"""

def __native_namespace__(self) -> ModuleType:
return self._compliant_frame.__native_namespace__() # type: ignore[no-any-return]

@@ -660,29 +752,6 @@ def from_numpy(
)
raise ValueError(msg)

@property
def implementation(self) -> Implementation:
"""Return implementation of native frame.

This can be useful when you need to use special-casing for features outside of
Narwhals' scope - for example, when dealing with pandas' Period Dtype.

Examples:
>>> import narwhals as nw
>>> import pandas as pd
>>> df_native = pd.DataFrame({"a": [1, 2, 3]})
>>> df = nw.from_native(df_native)
>>> df.implementation
<Implementation.PANDAS: 'pandas'>
>>> df.implementation.is_pandas()
True
>>> df.implementation.is_pandas_like()
True
>>> df.implementation.is_polars()
False
"""
return self._compliant_frame._implementation

def __len__(self) -> int:
return self._compliant_frame.__len__()

@@ -2295,22 +2364,6 @@ def __init__(self, df: Any, *, level: Literal["full", "lazy", "interchange"]) ->
def __repr__(self) -> str: # pragma: no cover
return generate_repr("Narwhals LazyFrame", self.to_native().__repr__())

@property
def implementation(self) -> Implementation:
"""Return implementation of native frame.

This can be useful when you need to use special-casing for features outside of
Narwhals' scope - for example, when dealing with pandas' Period Dtype.

Examples:
>>> import narwhals as nw
>>> import dask.dataframe as dd
>>> lf_native = dd.from_dict({"a": [1, 2]}, npartitions=1)
>>> nw.from_native(lf_native).implementation
<Implementation.DASK: 'dask'>
"""
return self._compliant_frame._implementation

def __getitem__(self, item: str | slice) -> NoReturn:
msg = "Slicing is not supported on LazyFrame"
raise TypeError(msg)
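The `_ImplDescriptor` added in this file replaces the removed `implementation` properties so that attribute access can narrow to a backend-specific type while class-level access still returns the descriptor. A stripped-down sketch of that descriptor-plus-overloads mechanism, using hypothetical names rather than narwhals code:

```python
from __future__ import annotations

from typing import Any, Generic, Literal, TypeVar, overload

NativeT = TypeVar("NativeT")


class _KindDescriptor:
    # Overloading `__get__` on the *instance* type lets plain attribute access
    # (`wrapper.kind`) narrow according to the wrapper's type parameter, while
    # class-level access returns the descriptor itself.
    @overload
    def __get__(self, instance: Wrapper[int], owner: Any) -> Literal["int"]: ...
    @overload
    def __get__(self, instance: Wrapper[str], owner: Any) -> Literal["str"]: ...
    @overload
    def __get__(self, instance: None, owner: Any) -> _KindDescriptor: ...
    @overload
    def __get__(self, instance: Wrapper[Any], owner: Any) -> str: ...
    def __get__(self, instance: Any, owner: Any) -> Any:
        if instance is None:
            return self
        return type(instance._native).__name__


class Wrapper(Generic[NativeT]):
    kind: _KindDescriptor = _KindDescriptor()

    def __init__(self, native: NativeT) -> None:
        self._native = native


ints = Wrapper(1)      # Wrapper[int]
text = Wrapper("abc")  # Wrapper[str]
# Type checkers see Literal["int"] and Literal["str"] here, not plain str.
int_kind, str_kind = ints.kind, text.kind
```

The overload bodies never run; only the final untyped implementation does, which is why the PR's version simply forwards to `instance._compliant_frame._implementation`.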
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -74,7 +74,7 @@ typing = [ # keep some of these pinned and bump periodically so there's fewer s
"sqlframe",
"polars==1.32.2",
"uv",
"narwhals[ibis]",
"narwhals[ibis,dask,modin]",
]
docs = [
"black", # required by mkdocstrings_handlers
11 changes: 2 additions & 9 deletions tests/expr_and_series/is_close_test.py
@@ -11,7 +11,6 @@
import pytest

import narwhals as nw
from narwhals._utils import is_eager_allowed
from narwhals.exceptions import ComputeError, InvalidOperationError
from tests.conftest import (
dask_lazy_p1_constructor,
@@ -114,11 +113,8 @@ def test_is_close_series_with_series(
) -> None:
df = nw.from_native(constructor_eager(data), eager_only=True)
x, y = df["x"], df["y"]
backend = df.implementation
assert is_eager_allowed(backend)
Member commented:

πŸ₯³


nulls = nw.new_series(
name="nulls", values=[None] * len(x), dtype=nw.Float64(), backend=backend
"nulls", [None] * len(x), nw.Float64(), backend=df.implementation
)
x = x.zip_with(x != NAN_PLACEHOLDER, x**0.5).zip_with(x != NULL_PLACEHOLDER, nulls)
y = y.zip_with(y != NAN_PLACEHOLDER, y**0.5).zip_with(y != NULL_PLACEHOLDER, nulls)
@@ -141,11 +137,8 @@ def test_is_close_series_with_scalar(
) -> None:
df = nw.from_native(constructor_eager(data), eager_only=True)
y = df["y"]
backend = df.implementation
assert is_eager_allowed(backend)

nulls = nw.new_series(
name="nulls", values=[None] * len(y), dtype=nw.Float64(), backend=backend
"nulls", [None] * len(y), nw.Float64(), backend=df.implementation
)
y = y.zip_with(y != NAN_PLACEHOLDER, y**0.5).zip_with(y != NULL_PLACEHOLDER, nulls)
result = y.is_close(other, abs_tol=abs_tol, rel_tol=rel_tol, nans_equal=nans_equal)
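The simplified tests above also exercise the use case raised in the review thread on `BaseFrame.implementation`'s docstring: with the new descriptor typing, `df.implementation` is already known to be an eager-allowed backend, so it flows straight into `backend=` without the old `is_eager_allowed` assert. A standalone sketch of that pattern, assuming pandas is installed:

```python
import pandas as pd

import narwhals as nw

df = nw.from_native(pd.DataFrame({"a": [1, 2, 3]}), eager_only=True)

# `df.implementation` now satisfies the eager-allowed backend type directly,
# so no runtime assert is needed before handing it to `nw.new_series`.
nulls = nw.new_series(
    name="nulls",
    values=[None] * len(df),
    dtype=nw.Float64(),
    backend=df.implementation,
)
assert len(nulls) == len(df)
```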
97 changes: 97 additions & 0 deletions tests/implementation_test.py
@@ -1,9 +1,27 @@
from __future__ import annotations

# Using pyright's assert type instead
# mypy: disable-error-code="assert-type"
from typing import TYPE_CHECKING, Any, cast

import pytest

import narwhals as nw

if TYPE_CHECKING:
from narwhals._typing import (
_ArrowImpl,
_DaskImpl,
_DuckDBImpl,
_EagerAllowedImpl,
_LazyAllowedImpl,
_ModinImpl,
_PandasImpl,
_PolarsImpl,
_SQLFrameImpl,
)
from narwhals.typing import IntoDataFrame


def test_implementation_pandas() -> None:
pytest.importorskip("pandas")
@@ -52,3 +70,82 @@ def test_implementation_polars() -> None:
)
def test_implementation_new(member: str, value: str) -> None:
assert nw.Implementation(value) is getattr(nw.Implementation, member)


if TYPE_CHECKING:

def test_implementation_typing() -> None: # noqa: PLR0914
import dask.dataframe as dd
import modin.pandas as mpd
import pandas as pd
import polars as pl
import pyarrow as pa
from typing_extensions import assert_type

from tests.conftest import (
duckdb_lazy_constructor,
sqlframe_pyspark_lazy_constructor,
)

data: dict[str, Any] = {"a": [1, 2, 3]}
polars_df = nw.from_native(pl.DataFrame(data))
polars_ldf = nw.from_native(pl.LazyFrame(data))
pandas_df = nw.from_native(pd.DataFrame(data))
arrow_df = nw.from_native(pa.table(data))
duckdb_ldf = nw.from_native(duckdb_lazy_constructor(data))
sqlframe_ldf = nw.from_native(sqlframe_pyspark_lazy_constructor(data))

polars_impl = polars_df.implementation
lazy_polars_impl = polars_ldf.implementation
pandas_impl = pandas_df.implementation
arrow_impl = arrow_df.implementation
duckdb_impl = duckdb_ldf.implementation
sqlframe_impl = sqlframe_ldf.implementation

assert_type(polars_impl, _PolarsImpl)
assert_type(lazy_polars_impl, _PolarsImpl)
# NOTE: Testing the lazy versions of pandas/pyarrow would require adding overloads to `DataFrame.lazy`
# Currently, everything becomes `LazyFrame[Any]`
assert_type(pandas_impl, _PandasImpl)
assert_type(arrow_impl, _ArrowImpl)
assert_type(duckdb_impl, _DuckDBImpl)
assert_type(sqlframe_impl, _SQLFrameImpl)

modin_native = mpd.DataFrame.from_dict(data)
modin_df = nw.from_native(modin_native)
modin_impl = modin_df.implementation
# TODO @dangotbanned: Is this even possible?
# - `mypy` won't ever work, treats as `Any`
# - `pyright` can resolve `modin_df: narwhals.dataframe.DataFrame[modin.pandas.dataframe.DataFrame]`
# - But we run into variance issues if trying to widen the concrete type again
assert_type(modin_impl, _ModinImpl) # pyright: ignore[reportAssertTypeFailure]
# If ^^^ can be fixed, the next one should be removed
assert_type(modin_impl, _EagerAllowedImpl)

# NOTE: Constructor returns `Unknown`
dask_native = cast("dd.DataFrame", dd.DataFrame.from_dict(data))
dask_ldf = nw.from_native(dask_native)
dask_impl = dask_ldf.implementation
# NOTE: Same issue as modin
assert_type(dask_impl, _DaskImpl) # pyright: ignore[reportAssertTypeFailure]
# If ^^^ can be fixed, the next one should be removed
assert_type(dask_impl, _LazyAllowedImpl)

can_lazyframe_collect_dfs: list[
nw.DataFrame[pl.DataFrame]
| nw.DataFrame[pd.DataFrame]
| nw.DataFrame[pa.Table]
] = [polars_df, pandas_df, arrow_df]
can_lazyframe_collect_impl = can_lazyframe_collect_dfs[0].implementation
assert_type(can_lazyframe_collect_impl, _PolarsImpl | _PandasImpl | _ArrowImpl)

very_lost_df = nw.DataFrame.__new__(nw.DataFrame)
very_lost_impl = very_lost_df.implementation
# TODO @dangotbanned: Is this so bad?
# - Currently `DataFrame[Any]` matches the first overload (`_PolarsImpl`)
# - That is accepted **everywhere** that uses `IntoBackend`
assert_type(very_lost_impl, _EagerAllowedImpl) # pyright: ignore[reportAssertTypeFailure]

not_so_lost_df = nw.DataFrame.__new__(nw.DataFrame[IntoDataFrame])
not_so_lost_impl = not_so_lost_df.implementation
assert_type(not_so_lost_impl, _EagerAllowedImpl)
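As the header comments note, the whole `test_implementation_typing` function sits under `if TYPE_CHECKING:` with mypy's `assert-type` errors disabled, so only pyright analyzes it and nothing in it ever executes. A tiny sketch of why that structure is cheap at runtime:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import assert_type

    def _typing_only_check() -> None:
        # Never executed: heavy optional imports cost nothing at runtime, and
        # the type checker is the only thing that evaluates this assertion.
        x = len("abc")
        assert_type(x, int)
```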
2 changes: 1 addition & 1 deletion tests/namespace_test.py
@@ -81,7 +81,7 @@ def test_namespace_from_native_object(constructor: Constructor) -> None:
def test_namespace_from_native_object_invalid() -> None:
data = {"a": [1, 2, 3], "b": [4, 5, 6]}
with pytest.raises(TypeError, match=r"dict"):
Namespace.from_native_object(data) # pyright: ignore[reportCallIssue, reportArgumentType]
Namespace.from_native_object(data) # type: ignore[call-overload]


@eager_allowed
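The one-line change above swaps a pyright-specific suppression for a mypy error-code ignore on the intentionally invalid call. A small sketch (hypothetical function, not narwhals code) of the two comment styles involved:

```python
def takes_int(x: int) -> int:
    return x


takes_int("nope")  # type: ignore[arg-type]  # mypy-style, error-code specific
takes_int("nope")  # pyright: ignore[reportArgumentType]  # pyright-only rule
```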