Skip to content

Commit 85af320

Browse files
committed
feat(typing): DataFrame.from_native series typing!?!?!?!
Very related to #3086 (comment)
1 parent 0828bb9 commit 85af320

File tree

3 files changed

+38
-2
lines changed

3 files changed

+38
-2
lines changed

narwhals/_plan/arrow/typing.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from collections.abc import Callable, Mapping
+from collections.abc import Callable, Mapping, Sequence
 from typing import TYPE_CHECKING, Any, Literal, Protocol, overload

 from narwhals._typing_compat import TypeVar
@@ -23,10 +23,21 @@
     )
     from typing_extensions import TypeAlias

+    from narwhals.typing import NativeDataFrame, NativeSeries
+
     StringScalar: TypeAlias = "Scalar[StringType | LargeStringType]"
     IntegerType: TypeAlias = "Int8Type | Int16Type | Int32Type | Int64Type | Uint8Type | Uint16Type | Uint32Type | Uint64Type"
     IntegerScalar: TypeAlias = "Scalar[IntegerType]"

+    class NativeArrowSeries(NativeSeries, Protocol):
+        @property
+        def chunks(self) -> list[Any]: ...
+
+    class NativeArrowDataFrame(NativeDataFrame, Protocol):
+        def column(self, *args: Any, **kwds: Any) -> NativeArrowSeries: ...
+        @property
+        def columns(self) -> Sequence[NativeArrowSeries]: ...
+

 ScalarT = TypeVar("ScalarT", bound="pa.Scalar[Any]", default="pa.Scalar[Any]")
 ScalarPT_contra = TypeVar(

narwhals/_plan/dataframe.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,10 @@
 if TYPE_CHECKING:
     from collections.abc import Sequence

+    import pyarrow as pa
     from typing_extensions import Self

+    from narwhals._plan.arrow.typing import NativeArrowDataFrame
     from narwhals._plan.compliant.dataframe import CompliantDataFrame, CompliantFrame

@@ -47,6 +49,7 @@ def columns(self) -> list[str]:
     def __repr__(self) -> str:  # pragma: no cover
         return generate_repr(f"nw.{type(self).__name__}", self.to_native().__repr__())

+    # TODO @dangotbanned: Can this be typed?
     def __init__(self, compliant: Any, /) -> None:
         self._compliant = compliant

@@ -100,10 +103,20 @@ class DataFrame(
     def _series(self) -> type[Series[NativeSeriesT]]:
         return Series[NativeSeriesT]

+    @overload
+    @classmethod
+    def from_native(
+        cls: type[DataFrame[Any, Any]], native: NativeArrowDataFrame, /
+    ) -> DataFrame[pa.Table, pa.ChunkedArray[Any]]: ...
+    @overload
+    @classmethod
+    def from_native(
+        cls: type[DataFrame[Any, Any]], native: NativeDataFrameT, /
+    ) -> DataFrame[NativeDataFrameT]: ...
     @classmethod
     def from_native(
         cls: type[DataFrame[Any, Any]], native: NativeDataFrameT, /
-    ) -> DataFrame[NativeDataFrameT]:
+    ) -> DataFrame[Any, Any]:
         if is_pyarrow_table(native):
             from narwhals._plan.arrow.dataframe import ArrowDataFrame


tests/plan/compliant_test.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -540,6 +540,7 @@ def test_row_is_py_literal(


 if TYPE_CHECKING:
+    from typing_extensions import assert_type

     def test_protocol_expr() -> None:
         """Static test for all members implemented.
@@ -554,3 +555,14 @@ def test_protocol_expr() -> None:
         scalar = ArrowScalar()
         assert expr
         assert scalar
+
+    def test_dataframe_from_native_overloads() -> None:
+        """Ensure we can reveal the `NativeSeries` **without** a dependency."""
+        data: dict[str, Any] = {}
+        native_good = pa.table(data)
+        result_good = nwp.DataFrame.from_native(native_good)
+        assert_type(result_good, "nwp.DataFrame[pa.Table, pa.ChunkedArray[Any]]")
+
+        native_bad = native_good.to_batches()[0]
+        nwp.DataFrame.from_native(native_bad)  # type: ignore[call-overload]
+        assert_type(native_bad, "pa.RecordBatch")

0 commit comments

Comments
 (0)