Skip to content

Commit f0d61b2

Browse files
committed
Merge branch 'v3' of https://github.com/zarr-developers/zarr-python into add-array-storage-helpers
2 parents c953f21 + 19ed733 commit f0d61b2

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

49 files changed

+376
-292
lines changed

.deepsource.toml

Lines changed: 0 additions & 7 deletions
This file was deleted.

.github/ISSUE_TEMPLATE/config.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@ contact_links:
33
- name: ✨ Propose a new major feature
44
url: https://github.com/zarr-developers/zarr-specs
55
about: A new major feature should be discussed in the Zarr specifications repository.
6-
- name: ❓ Discuss something on gitter
7-
url: https://gitter.im/zarr-developers/community
8-
about: For questions like "How do I do X with Zarr?", you can move to our Gitter channel.
6+
- name: ❓ Discuss something on ZulipChat
7+
url: https://ossci.zulipchat.com/
8+
about: For questions like "How do I do X with Zarr?", you can move to our ZulipChat.
99
- name: ❓ Discuss something on GitHub Discussions
1010
url: https://github.com/zarr-developers/zarr-python/discussions
1111
about: For questions like "How do I do X with Zarr?", you can move to GitHub Discussions.

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,3 +84,5 @@ fixture/
8484
.DS_Store
8585
tests/.hypothesis
8686
.hypothesis/
87+
88+
zarr/version.py

bench/compress_normal.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
a,
1717
chunks=1000000,
1818
compression="blosc",
19-
compression_opts=dict(cname="lz4", clevel=5, shuffle=2),
19+
compression_opts={"cname": "lz4", "clevel": 5, "shuffle": 2},
2020
)
2121
print(z)
2222

pyproject.toml

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -207,18 +207,45 @@ extend-exclude = [
207207

208208
[tool.ruff.lint]
209209
extend-select = [
210-
"B", # flake8-bugbear
211-
"I", # isort
212-
"ISC",
213-
"UP", # pyupgrade
214-
"RSE",
210+
"B", # flake8-bugbear
211+
"C4", # flake8-comprehensions
212+
"FLY", # flynt
213+
"I", # isort
214+
"ISC", # flake8-implicit-str-concat
215+
"PGH", # pygrep-hooks
216+
"PT", # flake8-pytest-style
217+
"PYI", # flake8-pyi
218+
"RSE", # flake8-raise
219+
"RET", # flake8-return
215220
"RUF",
216221
"TCH", # flake8-type-checking
217222
"TRY", # tryceratops
223+
"UP", # pyupgrade
218224
]
219225
ignore = [
226+
"PT004", # deprecated
227+
"PT011", # TODO: apply this rule
228+
"PT012", # TODO: apply this rule
229+
"PYI013",
230+
"RET505",
231+
"RET506",
220232
"RUF005",
221233
"TRY003",
234+
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
235+
"W191",
236+
"E111",
237+
"E114",
238+
"E117",
239+
"D206",
240+
"D300",
241+
"Q000",
242+
"Q001",
243+
"Q002",
244+
"Q003",
245+
"COM812",
246+
"COM819",
247+
"ISC001",
248+
"ISC002",
222249
]
223250

224251
[tool.mypy]

src/zarr/abc/store.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,12 @@
22

33
from abc import ABC, abstractmethod
44
from asyncio import gather
5+
from collections.abc import AsyncGenerator, Iterable
56
from typing import TYPE_CHECKING, Any, NamedTuple, Protocol, runtime_checkable
67

78
if TYPE_CHECKING:
89
from collections.abc import AsyncGenerator, Iterable
10+
from types import TracebackType
911
from typing import Any, TypeAlias
1012

1113
from typing_extensions import Self
@@ -42,7 +44,7 @@ class Store(ABC):
4244
_mode: AccessMode
4345
_is_open: bool
4446

45-
def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any):
47+
def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any) -> None:
4648
self._is_open = False
4749
self._mode = AccessMode.from_literal(mode)
4850

@@ -56,7 +58,12 @@ def __enter__(self) -> Self:
5658
"""Enter a context manager that will close the store upon exiting."""
5759
return self
5860

59-
def __exit__(self, *args: Any) -> None:
61+
def __exit__(
62+
self,
63+
exc_type: type[BaseException] | None,
64+
exc_value: BaseException | None,
65+
traceback: TracebackType | None,
66+
) -> None:
6067
"""Close the store."""
6168
self.close()
6269

@@ -171,7 +178,7 @@ async def _set_many(self, values: Iterable[tuple[str, Buffer]]) -> None:
171178
Insert multiple (key, value) pairs into storage.
172179
"""
173180
await gather(*(self.set(key, value) for key, value in values))
174-
return None
181+
return
175182

176183
@property
177184
@abstractmethod

src/zarr/api/asynchronous.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import asyncio
44
import warnings
5-
from typing import TYPE_CHECKING, Any, Literal, Union, cast
5+
from typing import TYPE_CHECKING, Any, Literal, cast
66

77
import numpy as np
88
import numpy.typing as npt
@@ -25,6 +25,10 @@
2525
from zarr.core.buffer import NDArrayLike
2626
from zarr.core.chunk_key_encodings import ChunkKeyEncoding
2727

28+
# TODO: this type could use some more thought
29+
ArrayLike = AsyncArray | Array | npt.NDArray[Any]
30+
PathLike = str
31+
2832
__all__ = [
2933
"consolidate_metadata",
3034
"copy",
@@ -53,10 +57,6 @@
5357
"zeros_like",
5458
]
5559

56-
# TODO: this type could use some more thought, noqa to avoid "Variable "asynchronous.ArrayLike" is not valid as a type"
57-
ArrayLike = Union[AsyncArray | Array | npt.NDArray[Any]] # noqa
58-
PathLike = str
59-
6060

6161
def _get_shape_chunks(a: ArrayLike | Any) -> tuple[ChunkCoords | None, ChunkCoords | None]:
6262
"""helper function to get the shape and chunks from an array-like object"""

src/zarr/codecs/_v2.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,18 @@
88

99
from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec
1010
from zarr.core.buffer import Buffer, NDBuffer, default_buffer_prototype
11-
from zarr.core.common import JSON, to_thread
11+
from zarr.core.common import to_thread
1212
from zarr.registry import get_ndbuffer_class
1313

1414
if TYPE_CHECKING:
15+
import numcodecs.abc
16+
1517
from zarr.core.array_spec import ArraySpec
1618

1719

1820
@dataclass(frozen=True)
1921
class V2Compressor(ArrayBytesCodec):
20-
compressor: dict[str, JSON] | None
22+
compressor: numcodecs.abc.Codec | None
2123

2224
is_fixed_size = False
2325

@@ -27,9 +29,8 @@ async def _decode_single(
2729
chunk_spec: ArraySpec,
2830
) -> NDBuffer:
2931
if self.compressor is not None:
30-
compressor = numcodecs.get_codec(self.compressor)
3132
chunk_numpy_array = ensure_ndarray(
32-
await to_thread(compressor.decode, chunk_bytes.as_array_like())
33+
await to_thread(self.compressor.decode, chunk_bytes.as_array_like())
3334
)
3435
else:
3536
chunk_numpy_array = ensure_ndarray(chunk_bytes.as_array_like())
@@ -47,14 +48,13 @@ async def _encode_single(
4748
) -> Buffer | None:
4849
chunk_numpy_array = chunk_array.as_numpy_array()
4950
if self.compressor is not None:
50-
compressor = numcodecs.get_codec(self.compressor)
5151
if (
5252
not chunk_numpy_array.flags.c_contiguous
5353
and not chunk_numpy_array.flags.f_contiguous
5454
):
5555
chunk_numpy_array = chunk_numpy_array.copy(order="A")
5656
encoded_chunk_bytes = ensure_bytes(
57-
await to_thread(compressor.encode, chunk_numpy_array)
57+
await to_thread(self.compressor.encode, chunk_numpy_array)
5858
)
5959
else:
6060
encoded_chunk_bytes = ensure_bytes(chunk_numpy_array)

src/zarr/codecs/sharding.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ def is_dense(self, chunk_byte_length: int) -> bool:
151151

152152
# Are all non-empty offsets unique?
153153
if len(
154-
set(offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64)
154+
{offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64}
155155
) != len(sorted_offsets_and_lengths):
156156
return False
157157

@@ -380,8 +380,8 @@ def to_dict(self) -> dict[str, JSON]:
380380
"name": "sharding_indexed",
381381
"configuration": {
382382
"chunk_shape": self.chunk_shape,
383-
"codecs": tuple([s.to_dict() for s in self.codecs]),
384-
"index_codecs": tuple([s.to_dict() for s in self.index_codecs]),
383+
"codecs": tuple(s.to_dict() for s in self.codecs),
384+
"index_codecs": tuple(s.to_dict() for s in self.index_codecs),
385385
"index_location": self.index_location.value,
386386
},
387387
}
@@ -477,7 +477,7 @@ async def _decode_partial_single(
477477
)
478478

479479
indexed_chunks = list(indexer)
480-
all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks)
480+
all_chunk_coords = {chunk_coords for chunk_coords, _, _ in indexed_chunks}
481481

482482
# reading bytes of all requested chunks
483483
shard_dict: ShardMapping = {}

src/zarr/codecs/transpose.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -96,16 +96,14 @@ async def _decode_single(
9696
chunk_spec: ArraySpec,
9797
) -> NDBuffer:
9898
inverse_order = np.argsort(self.order)
99-
chunk_array = chunk_array.transpose(inverse_order)
100-
return chunk_array
99+
return chunk_array.transpose(inverse_order)
101100

102101
async def _encode_single(
103102
self,
104103
chunk_array: NDBuffer,
105104
_chunk_spec: ArraySpec,
106105
) -> NDBuffer | None:
107-
chunk_array = chunk_array.transpose(self.order)
108-
return chunk_array
106+
return chunk_array.transpose(self.order)
109107

110108
def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
111109
return input_byte_length

0 commit comments

Comments (0)