-from _typeshed import Incomplete
-from typing import Final, Literal
+import io
+import os
+from _typeshed import SupportsRead, SupportsWrite
+from collections.abc import Mapping, Sequence
+from typing import Any, Final, Literal as L, TypeAlias, TypeGuard, TypedDict, overload, type_check_only
+from typing_extensions import TypeVar
+
+import numpy as np
+from numpy import _AnyShapeT, _DTypeDescr  # noqa: ICN003
+from numpy._typing import DTypeLike, _DTypeLike
+
+from ._utils_impl import drop_metadata as drop_metadata

 __all__: list[str] = []

-EXPECTED_KEYS: Final[set[str]]
-MAGIC_PREFIX: Final[bytes]
-MAGIC_LEN: Literal[8]
-ARRAY_ALIGN: Literal[64]
-BUFFER_SIZE: Literal[262144]  # 2**18
-GROWTH_AXIS_MAX_DIGITS: Literal[21]
-
-def magic(major: Incomplete, minor: Incomplete) -> Incomplete: ...
-def read_magic(fp: Incomplete) -> Incomplete: ...
-def dtype_to_descr(dtype: Incomplete) -> Incomplete: ...
-def descr_to_dtype(descr: Incomplete) -> Incomplete: ...
-def header_data_from_array_1_0(array: Incomplete) -> Incomplete: ...
-def write_array_header_1_0(fp: Incomplete, d: Incomplete) -> Incomplete: ...
-def write_array_header_2_0(fp: Incomplete, d: Incomplete) -> Incomplete: ...
-def read_array_header_1_0(fp: Incomplete) -> Incomplete: ...
-def read_array_header_2_0(fp: Incomplete) -> Incomplete: ...
-def write_array(
-    fp: Incomplete,
-    array: Incomplete,
-    version: Incomplete = ...,
-    allow_pickle: Incomplete = ...,
-    pickle_kwargs: Incomplete = ...,
-) -> Incomplete: ...
-def read_array(fp: Incomplete, allow_pickle: Incomplete = ..., pickle_kwargs: Incomplete = ...) -> Incomplete: ...
+###
+
+_ScalarT = TypeVar("_ScalarT", bound=np.generic)
+
+_ToDescr: TypeAlias = str | Sequence[tuple[str, str] | tuple[str, str, tuple[int, ...]]]
+_HeaderVersion: TypeAlias = tuple[L[1, 2, 3], L[0]]
+_MemmapMode: TypeAlias = L["r", "c", "r+", "w+"]
+_ArrayHeader: TypeAlias = tuple[tuple[int, ...], bool, np.dtype[Any]]
+
+@type_check_only
+class _HeaderDict_1_0(TypedDict):
+    shape: tuple[int, ...]
+    fortran_order: bool
+    descr: _DTypeDescr
+
+###
+
+EXPECTED_KEYS: Final[set[str]] = ...
+MAGIC_PREFIX: Final = b"\x93NUMPY"
+MAGIC_LEN: Final = 8  # len(MAGIC_PREFIX) + 2
+ARRAY_ALIGN: Final = 64
+BUFFER_SIZE: Final = 262_144  # 2**18
+GROWTH_AXIS_MAX_DIGITS: Final = 21
+_MAX_HEADER_SIZE: Final = 10_000
+
+#
+def _check_version(version: _HeaderVersion | None) -> None: ...
+def _filter_header(s: str) -> str: ...
+def _wrap_header(header: str, version: _HeaderVersion) -> bytes: ...
+def _wrap_header_guess_version(header: str) -> bytes: ...
+def _read_bytes(fp: SupportsRead[bytes], size: int, error_template: str = "ran out of data") -> bytes: ...
+
+# NOTE: Don't use `TypeIs` here: It might still be of this IO type if `False` is returned
+def isfileobj(f: object) -> TypeGuard[io.FileIO | io.BufferedReader | io.BufferedWriter]: ...
+
+#
+def magic(major: int, minor: int) -> bytes: ...
+def read_magic(fp: SupportsRead[bytes]) -> tuple[int, int]: ...
+
+#
+def dtype_to_descr(dtype: np.dtype[Any]) -> _DTypeDescr | str: ...
+def descr_to_dtype(descr: _ToDescr) -> np.dtype[Any]: ...
+
+#
+@overload  # known dtype, known shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: _DTypeLike[_ScalarT],
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[_ScalarT]]: ...
+@overload  # known dtype, known shape (keyword)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    *,
+    dtype: _DTypeLike[_ScalarT],
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[_ScalarT]]: ...
+@overload  # unknown dtype, known shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: DTypeLike | None,
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[Any]]: ...
+@overload  # unknown dtype, known shape (keyword)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    dtype: DTypeLike | None = None,
+    *,
+    shape: _AnyShapeT,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[_AnyShapeT, np.dtype[Any]]: ...
+@overload  # known dtype, unknown shape (positional)
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode,
+    dtype: _DTypeLike[_ScalarT],
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[_ScalarT]]: ...
+@overload  # known dtype, unknown shape (keyword)
 def open_memmap(
-    filename: Incomplete,
-    mode: Incomplete = ...,
-    dtype: Incomplete = ...,
-    shape: Incomplete = ...,
-    fortran_order: Incomplete = ...,
-    version: Incomplete = ...,
-) -> Incomplete: ...
-def isfileobj(f: Incomplete) -> Incomplete: ...
-def drop_metadata(dtype: Incomplete, /) -> Incomplete: ...
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    *,
+    dtype: _DTypeLike[_ScalarT],
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[_ScalarT]]: ...
+@overload  # unknown dtype, unknown shape
+def open_memmap(
+    filename: str | os.PathLike[str],
+    mode: _MemmapMode = "r+",
+    dtype: DTypeLike | None = None,
+    shape: tuple[int, ...] | None = None,
+    fortran_order: bool = False,
+    version: _HeaderVersion | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.memmap[Any, np.dtype[Any]]: ...
+
+#
+def header_data_from_array_1_0(array: np.ndarray[Any, Any]) -> _HeaderDict_1_0: ...
+
+#
+def _read_array_header(fp: SupportsRead[bytes], version: _HeaderVersion, max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array_header_1_0(fp: SupportsRead[bytes], max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array_header_2_0(fp: SupportsRead[bytes], max_header_size: int = 10_000) -> _ArrayHeader: ...
+def read_array(
+    fp: SupportsRead[bytes],
+    allow_pickle: bool = False,
+    pickle_kwargs: Mapping[str, object] | None = None,
+    *,
+    max_header_size: int = 10_000,
+) -> np.ndarray[Any, np.dtype[Any]]: ...
+
+#
+def _write_array_header(fp: SupportsWrite[bytes], d: Mapping[str, str], version: _HeaderVersion | None = None) -> None: ...
+def write_array_header_1_0(fp: SupportsWrite[bytes], d: Mapping[str, str]) -> None: ...
+def write_array_header_2_0(fp: SupportsWrite[bytes], d: Mapping[str, str]) -> None: ...
+def write_array(
+    fp: SupportsWrite[bytes],
+    array: np.ndarray[Any, Any],
+    version: _HeaderVersion | None = None,
+    allow_pickle: bool = True,
+    pickle_kwargs: Mapping[str, object] | None = None,
+) -> None: ...
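
For reference, a minimal usage sketch (not part of the diff) of how the `open_memmap` overloads above are meant to resolve. The file name and shape are illustrative only; the types noted in the comments are what a checker such as mypy or pyright should report against this stub.

```python
import numpy as np
from numpy.lib.format import open_memmap

# Known dtype and shape (keyword form): the stub should infer
# np.memmap[tuple[int, int], np.dtype[np.float64]].
mm = open_memmap("example.npy", mode="w+", dtype=np.float64, shape=(3, 4))
mm[...] = 0.0
mm.flush()

# No dtype/shape given: falls through to the last overload,
# np.memmap[Any, np.dtype[Any]] (dtype and shape come from the file header).
mm2 = open_memmap("example.npy", mode="r")
```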
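Similarly, a small round-trip sketch (again not part of the diff) through an in-memory buffer. The .npy format is binary, which is why the `fp` parameters are typed over `bytes` streams, and why the magic is 8 bytes (`b"\x93NUMPY"` plus major and minor version).

```python
import io

import numpy as np
from numpy.lib.format import read_array, read_magic, write_array

buf = io.BytesIO()
write_array(buf, np.arange(6).reshape(2, 3), version=(1, 0))

buf.seek(0)
assert read_magic(buf) == (1, 0)  # parses the 8-byte magic header

buf.seek(0)
arr = read_array(buf)  # typed as np.ndarray[Any, np.dtype[Any]]
assert arr.shape == (2, 3)
```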