Commit 6c7b475

Merge pull request #250 from numpy/stubtest-fixes-lib
🏷️ fix remaining stubtest errors in `numpy.lib`
2 parents 0a477a5 + 121e951 commit 6c7b475

File tree: 9 files changed, +320 -126 lines


.mypyignore-todo

Lines changed: 0 additions & 9 deletions
@@ -15,15 +15,6 @@ numpy._pyinstaller.hook-numpy
 numpy.ctypeslib._ctypeslib
 numpy.fft.helper
 
-numpy(\.lib\._polynomial_impl|\.matlib)?\.poly1d\.integ
-numpy.lib(._arrayterator_impl)?.Arrayterator.__array__
-numpy.lib.(_array_utils_impl|array_utils).normalize_axis_tuple
-numpy.lib._iotools.NameValidator.defaultdeletechars
-numpy.lib.format.open_memmap
-numpy.lib.format.read_array(_header_(1|2)_0)?
-numpy.lib.mixins.NDArrayOperatorsMixin.__array_ufunc__
-numpy.lib.recfunctions.unstructured_to_structured
-
 numpy.distutils
 
 numpy.f2py.__main__

src/numpy-stubs/__init__.pyi

Lines changed: 5 additions & 1 deletion
@@ -876,6 +876,10 @@ _BinOperandComplex128_co: TypeAlias = complex | floating[_64Bit] | integer[_64Bit]
 _ToReal: TypeAlias = float | CanComplex | CanFloat | CanIndex
 _ToImag: TypeAlias = float | CanFloat | CanIndex
 
+_DTypeDescr: TypeAlias = (
+    list[tuple[str, str]] | list[tuple[str, str, tuple[int, ...]]] | list[tuple[str, str] | tuple[str, str, tuple[int, ...]]]
+)
+
 ###
 # TypedDict's (for internal use only)
 
@@ -1014,7 +1018,7 @@ class dtype(Generic[_ScalarT_co], metaclass=_DTypeMeta):
     @property
     def char(self) -> _DTypeChar: ...
     @property
-    def descr(self) -> list[tuple[LiteralString, LiteralString] | tuple[LiteralString, LiteralString, tuple[int, ...]]]: ...
+    def descr(self) -> _DTypeDescr: ...
     @property
     def fields(self) -> MappingProxyType[LiteralString, tuple[dtype[Any], int] | tuple[dtype[Any], int, Any]] | None: ...
     @property
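
For reference, a quick sketch of the runtime values the new `_DTypeDescr` alias models (the mixed 2-/3-tuple case only occurs when some fields have subarray shapes):

import numpy as np

# Homogeneous structured dtype: every entry is a 2-tuple.
print(np.dtype([("x", "<f8"), ("y", "<f8")]).descr)
# [('x', '<f8'), ('y', '<f8')]

# A subarray field carries a shape, so its entry is a 3-tuple.
print(np.dtype([("pos", "<f8", (3,)), ("id", "<i4")]).descr)
# [('pos', '<f8', (3,)), ('id', '<i4')]
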
src/numpy-stubs/lib/_array_utils_impl.pyi

Lines changed: 21 additions & 11 deletions

@@ -1,20 +1,30 @@
-from collections.abc import Iterable
-from typing import Any
-
-import numpy as np
-import numpy.typing as npt
+from collections.abc import Iterable, Mapping
+from typing import Any, Protocol, type_check_only
 
 __all__ = ["byte_bounds", "normalize_axis_index", "normalize_axis_tuple"]
 
+###
+
+@type_check_only
+class _HasSizeAndArrayInterface(Protocol):
+    @property
+    def size(self, /) -> int: ...
+    @property  # `TypedDict` cannot be used because it rejects `dict[str, Any]`
+    def __array_interface__(self, /) -> Mapping[str, Any]: ...
+
+###
+
 # NOTE: In practice `byte_bounds` can (potentially) take any object
 # implementing the `__array_interface__` protocol. The caveat is
 # that certain keys, marked as optional in the spec, must be present for
 # `byte_bounds`. This concerns `"strides"` and `"data"`.
-def byte_bounds(a: np.generic | npt.NDArray[Any]) -> tuple[int, int]: ...
-def normalize_axis_index(axis: int = ..., ndim: int = ..., msg_prefix: str | None = ...) -> int: ...
+def byte_bounds(a: _HasSizeAndArrayInterface) -> tuple[int, int]: ...
+
+###
+def normalize_axis_index(axis: int, ndim: int, msg_prefix: str | None = None) -> int: ...
 def normalize_axis_tuple(
     axis: int | Iterable[int],
-    ndim: int = ...,
-    argname: str | None = ...,
-    allow_duplicate: bool | None = ...,
-) -> tuple[int, int]: ...
+    ndim: int,
+    argname: str | None = None,
+    allow_duplicate: bool = False,
+) -> tuple[int, ...]: ...
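
As a sanity check on the corrected signatures, a small usage sketch (assuming NumPy >= 2.0, where these helpers are public in `numpy.lib.array_utils`):

import numpy as np
from numpy.lib.array_utils import byte_bounds, normalize_axis_tuple

# A plain ndarray satisfies the new protocol: it has `.size` and a
# sufficiently complete `__array_interface__`.
a = np.arange(12, dtype=np.int64)
low, high = byte_bounds(a)
print(high - low)  # 96 == a.size * a.itemsize for a contiguous array

# Negative axes are wrapped; the result length follows the input,
# which is why the return type is now `tuple[int, ...]`.
print(normalize_axis_tuple(-1, ndim=3))       # (2,)
print(normalize_axis_tuple((0, -1), ndim=3))  # (0, 2)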

src/numpy-stubs/lib/_arrayterator_impl.pyi

Lines changed: 8 additions & 1 deletion
@@ -2,7 +2,7 @@
 
 from collections.abc import Generator
 from types import EllipsisType
-from typing import Any, Final, Generic, TypeAlias
+from typing import Any, Final, Generic, TypeAlias, overload
 from typing_extensions import TypeVar
 
 import numpy as np
@@ -12,6 +12,7 @@ __all__ = ["Arrayterator"]
 ###
 
 _ShapeT_co = TypeVar("_ShapeT_co", bound=tuple[int, ...], default=tuple[int, ...], covariant=True)
+_DTypeT = TypeVar("_DTypeT", bound=np.dtype[Any])
 _DTypeT_co = TypeVar("_DTypeT_co", bound=np.dtype[Any], default=np.dtype[Any], covariant=True)
 _ScalarT = TypeVar("_ScalarT", bound=np.generic)
 
@@ -37,3 +38,9 @@ class Arrayterator(np.ndarray[_ShapeT_co, _DTypeT_co], Generic[_ShapeT_co, _DTypeT_co]):
     def __init__(self, /, var: np.ndarray[_ShapeT_co, _DTypeT_co], buf_size: int | None = None) -> None: ...  # pyright: ignore[reportInconsistentConstructor]
     def __getitem__(self, index: _AnyIndex, /) -> Arrayterator[tuple[int, ...], _DTypeT_co]: ...  # type: ignore[override]
     def __iter__(self) -> Generator[np.ndarray[tuple[int, ...], _DTypeT_co]]: ...
+
+    #
+    @overload  # type: ignore[override]
+    def __array__(self, /, dtype: _DTypeT_co | None = None, copy: bool | None = None) -> np.ndarray[_ShapeT_co, _DTypeT_co]: ...
+    @overload
+    def __array__(self, /, dtype: _DTypeT, copy: bool | None = None) -> np.ndarray[_ShapeT_co, _DTypeT]: ...
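
A small sketch of how the two `__array__` overloads are meant to resolve (the exact default integer dtype is platform-dependent):

import numpy as np
from numpy.lib import Arrayterator

it = Arrayterator(np.arange(12).reshape(3, 4), buf_size=4)

# No dtype argument -> first overload: the original dtype is kept.
a = np.asarray(it)
# Explicit dtype -> second overload: the result dtype follows the argument.
b = np.asarray(it, dtype=np.float64)
print(a.dtype, b.dtype)  # e.g. int64 float64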

src/numpy-stubs/lib/_iotools.pyi

Lines changed: 12 additions & 8 deletions
@@ -1,5 +1,5 @@
-from collections.abc import Callable, Iterable, Sequence
-from typing import Any, ClassVar, Final, Literal, TypedDict, overload, type_check_only
+from collections.abc import Callable, Iterable
+from typing import Any, ClassVar, Final, Literal as L, TypedDict, overload, type_check_only
 from typing_extensions import TypeVar, Unpack
 
 import numpy as np
@@ -11,7 +11,7 @@ _T = TypeVar("_T")
 class _ValidationKwargs(TypedDict, total=False):
     excludelist: Iterable[str] | None
     deletechars: Iterable[str] | None
-    case_sensitive: Literal["upper", "lower"] | bool | None
+    case_sensitive: L["upper", "lower"] | bool | None
     replace_space: str
 
 ###
@@ -39,8 +39,8 @@ class LineSplitter:
     def autostrip(self, /, method: Callable[[_T], Iterable[str]]) -> Callable[[_T], list[str]]: ...
 
 class NameValidator:
-    defaultexcludelist: ClassVar[Sequence[str]]
-    defaultdeletechars: ClassVar[Sequence[str]]
+    defaultexcludelist: ClassVar[list[L["return", "file", "print"]]]
+    defaultdeletechars: ClassVar[set[str]]
     excludelist: list[str]
     deletechars: set[str]
     case_converter: Callable[[str], str]
@@ -51,7 +51,7 @@ class NameValidator:
         /,
         excludelist: Iterable[str] | None = None,
         deletechars: Iterable[str] | None = None,
-        case_sensitive: Literal["upper", "lower"] | bool | None = None,
+        case_sensitive: L["upper", "lower"] | bool | None = None,
         replace_space: str = "_",
     ) -> None: ...
     def __call__(self, /, names: Iterable[str], defaultfmt: str = "f%i", nbfields: int | None = None) -> tuple[str, ...]: ...
@@ -90,16 +90,20 @@ class StringConverter:
     def upgrade_mapper(cls, func: Callable[[str], Any], default: object | None = None) -> None: ...
 
 @overload
-def str2bool(value: Literal["false", "False", "FALSE"]) -> Literal[False]: ...
+def str2bool(value: L["false", "False", "FALSE"]) -> L[False]: ...
 @overload
-def str2bool(value: Literal["true", "True", "TRUE"]) -> Literal[True]: ...
+def str2bool(value: L["true", "True", "TRUE"]) -> L[True]: ...
 
 #
 def has_nested_fields(ndtype: np.dtype[np.void]) -> bool: ...
+
+#
 def flatten_dtype(
     ndtype: np.dtype[np.void],
     flatten_base: bool = False,
 ) -> type[np.dtype[np.bool | np.number | np.character | np.object_ | np.datetime64 | np.timedelta64]]: ...
+
+#
 def easy_dtype(
     ndtype: npt.DTypeLike,
     names: Iterable[str] | None = None,
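
The `Literal` overloads on `str2bool` narrow the result for the exact spellings the stub accepts; a quick illustration (`_iotools` is a private module, imported here only for demonstration):

from numpy.lib._iotools import str2bool

flag: bool = str2bool("True")  # a type checker infers Literal[True]
print(str2bool("FALSE"))       # False
print(str2bool("true"))        # True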

src/numpy-stubs/lib/_polynomial_impl.pyi

Lines changed: 1 addition & 1 deletion
@@ -249,7 +249,7 @@ class poly1d(Generic[_ScalarT_co]):
     def deriv(self: poly1d[np.object_], /, m: _ToInt = 1) -> poly1d[np.object_]: ...
 
     #
-    def integ(self, /, m: _ToInt = 1, k: CoComplex_0d | CoComplex_1d | ToObject_1d | None = None) -> poly1d: ...
+    def integ(self, /, m: _ToInt = 1, k: CoComplex_0d | CoComplex_1d | ToObject_1d | None = 0) -> poly1d: ...
 
 ###
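
The runtime treats a missing `k` as an integration constant of zero, which the stub's new default spells out; a quick check:

import numpy as np

p = np.poly1d([2.0, 0.0])  # 2x
print(p.integ())     # poly1d([1., 0., 0.])  i.e. x**2 + 0
print(p.integ(k=5))  # poly1d([1., 0., 5.])  i.e. x**2 + 5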

src/numpy-stubs/lib/format.pyi

Lines changed: 159 additions & 35 deletions
@@ -1,39 +1,163 @@
-from _typeshed import Incomplete
-from typing import Final, Literal
+import io
+import os
+from _typeshed import SupportsRead, SupportsWrite
+from collections.abc import Mapping, Sequence
+from typing import Any, Final, Literal as L, TypeAlias, TypeGuard, TypedDict, overload, type_check_only
+from typing_extensions import TypeVar
+
+import numpy as np
+from numpy import _AnyShapeT, _DTypeDescr  # noqa: ICN003
+from numpy._typing import DTypeLike, _DTypeLike
+
+from ._utils_impl import drop_metadata as drop_metadata
 
 __all__: list[str] = []
 
-EXPECTED_KEYS: Final[set[str]]
-MAGIC_PREFIX: Final[bytes]
-MAGIC_LEN: Literal[8]
-ARRAY_ALIGN: Literal[64]
-BUFFER_SIZE: Literal[262144]  # 2**18
-GROWTH_AXIS_MAX_DIGITS: Literal[21]
-
-def magic(major: Incomplete, minor: Incomplete) -> Incomplete: ...
-def read_magic(fp: Incomplete) -> Incomplete: ...
-def dtype_to_descr(dtype: Incomplete) -> Incomplete: ...
-def descr_to_dtype(descr: Incomplete) -> Incomplete: ...
-def header_data_from_array_1_0(array: Incomplete) -> Incomplete: ...
-def write_array_header_1_0(fp: Incomplete, d: Incomplete) -> Incomplete: ...
-def write_array_header_2_0(fp: Incomplete, d: Incomplete) -> Incomplete: ...
-def read_array_header_1_0(fp: Incomplete) -> Incomplete: ...
-def read_array_header_2_0(fp: Incomplete) -> Incomplete: ...
-def write_array(
-    fp: Incomplete,
-    array: Incomplete,
-    version: Incomplete = ...,
-    allow_pickle: Incomplete = ...,
-    pickle_kwargs: Incomplete = ...,
-) -> Incomplete: ...
-def read_array(fp: Incomplete, allow_pickle: Incomplete = ..., pickle_kwargs: Incomplete = ...) -> Incomplete: ...
+###
+
+_ScalarT = TypeVar("_ScalarT", bound=np.generic)
+
+_ToDescr: TypeAlias = str | Sequence[tuple[str, str] | tuple[str, str, tuple[int, ...]]]
+_HeaderVersion: TypeAlias = tuple[L[1, 2, 3], L[0]]
+_MemmapMode: TypeAlias = L["r", "c", "r+", "w+"]
+_ArrayHeader: TypeAlias = tuple[tuple[int, ...], bool, np.dtype[Any]]
+
+@type_check_only
+class _HeaderDict_1_0(TypedDict):
+    shape: tuple[int, ...]
+    fortran_order: bool
+    descr: _DTypeDescr
+
+###
+
+EXPECTED_KEYS: Final[set[str]] = ...
+MAGIC_PREFIX: Final = b"\x93NUMPY"
+MAGIC_LEN: Final = 8
+ARRAY_ALIGN: Final = 64
+BUFFER_SIZE: Final = 262_144  # 2**18
+GROWTH_AXIS_MAX_DIGITS: Final = 21
+_MAX_HEADER_SIZE: Final = 10_000
+
+#
+def _check_version(version: _HeaderVersion | None) -> None: ...
+def _filter_header(s: str) -> str: ...
+def _wrap_header(header: str, version: _HeaderVersion) -> bytes: ...
+def _wrap_header_guess_version(header: str) -> bytes: ...
+def _read_bytes(fp: SupportsRead[bytes], size: int, error_template: str = "ran out of data") -> bytes: ...
+
+# NOTE: Don't use `TypeIs` here: It might still be of this IO type if `False` is returned
+def isfileobj(f: object) -> TypeGuard[io.FileIO | io.BufferedReader | io.BufferedWriter]: ...
+
+#
+def magic(major: int, minor: int) -> bytes: ...
+def read_magic(fp: SupportsRead[bytes]) -> tuple[int, int]: ...
+
+#
+def dtype_to_descr(dtype: np.dtype[Any]) -> _DTypeDescr | str: ...
+def descr_to_dtype(descr: _ToDescr) -> np.dtype[Any]: ...
+
+#
+@overload  # known dtype, known shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: _DTypeLike[_ScalarT],
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[_ScalarT]]: ...
+@overload  # known dtype, known shape (keyword)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    *,
+    dtype: _DTypeLike[_ScalarT],
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[_ScalarT]]: ...
+@overload  # unknown dtype, known shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: DTypeLike | None,
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[Any]]: ...
+@overload  # unknown dtype, known shape (keyword)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    dtype: DTypeLike | None = None,
+    *,
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[Any]]: ...
+@overload  # known dtype, unknown shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: _DTypeLike[_ScalarT],
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[_ScalarT]]: ...
+@overload  # known dtype, unknown shape (keyword)
 def open_memmap(
-    filename: Incomplete,
-    mode: Incomplete = ...,
-    dtype: Incomplete = ...,
-    shape: Incomplete = ...,
-    fortran_order: Incomplete = ...,
-    version: Incomplete = ...,
-) -> Incomplete: ...
-def isfileobj(f: Incomplete) -> Incomplete: ...
-def drop_metadata(dtype: Incomplete, /) -> Incomplete: ...
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    *,
+    dtype: _DTypeLike[_ScalarT],
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[_ScalarT]]: ...
+@overload  # unknown dtype, unknown shape
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    dtype: DTypeLike | None = None,
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[Any]]: ...
+
+#
+def header_data_from_array_1_0(array: np.ndarray[Any, Any]) -> _HeaderDict_1_0: ...
+
+#
+def _read_array_header(fp: SupportsRead[bytes], version: _HeaderVersion, max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array_header_1_0(fp: SupportsRead[bytes], max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array_header_2_0(fp: SupportsRead[bytes], max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array(
+    fp: SupportsRead[bytes],
+    allow_pickle: bool = False,
+    pickle_kwargs: Mapping[str, object] | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.ndarray[Any, np.dtype[Any]]: ...
+
+#
+def _write_array_header(fp: SupportsWrite[str], d: Mapping[str, str], version: _HeaderVersion | None = None) -> None: ...
+def write_array_header_1_0(fp: SupportsWrite[str], d: Mapping[str, str]) -> None: ...
+def write_array_header_2_0(fp: SupportsWrite[str], d: Mapping[str, str]) -> None: ...
+def write_array(
+    fp: SupportsWrite[str],
+    array: np.ndarray[Any, Any],
+    version: _HeaderVersion | None = None,
+    allow_pickle: bool = True,
+    pickle_kwargs: Mapping[str, object] | None = None,
+) -> None: ...
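
A short usage sketch of the newly typed API (the file path is illustrative):

import numpy as np
from numpy.lib.format import open_memmap, read_magic

# dtype and shape are both known, so the overloads can pin the result
# to memmap[tuple[int, int], dtype[float32]].
mm = open_memmap("demo.npy", mode="w+", dtype=np.float32, shape=(4, 4))
mm[...] = 1.0
mm.flush()

# read_magic returns the (major, minor) format version as plain ints.
with open("demo.npy", "rb") as f:
    print(read_magic(f))  # (1, 0)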
