Commit 5df2f6f

Merge branch 'main' into gpu-test
2 parents 22741dd + bb55f0c commit 5df2f6f

13 files changed (+75, -36 lines)

.github/workflows/test.yml

Lines changed: 1 addition & 0 deletions

@@ -64,6 +64,7 @@ jobs:
         run: |
           hatch env run --env test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} run-coverage
       - name: Upload coverage
+        if: ${{ matrix.dependency-set == 'optional' && matrix.os == 'ubuntu-latest' }}
         uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}

changes/2991.doc.rst

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+Updated the 3.0 migration guide to include the removal of "." syntax for getting group members.

changes/2996.bugfix.rst

Lines changed: 4 additions & 0 deletions

@@ -0,0 +1,4 @@
+Fixes `ConsolidatedMetadata` serialization of `nan`, `inf`, and `-inf` to be
+consistent with the behavior of `ArrayMetadata`.
+
+
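
For context on the bugfix note above, a rough sketch of why non-finite floats need special handling when metadata is written as JSON. The helper below is illustrative only and is not zarr's implementation; the string tokens follow the zarr v3 convention for float fill values.

    import json
    import math

    def encode_non_finite(value: float) -> float | str:
        # Illustrative only: NaN/Infinity are not valid JSON numbers, so
        # metadata encoders replace them with string tokens.
        if math.isnan(value):
            return "NaN"
        if math.isinf(value):
            return "Infinity" if value > 0 else "-Infinity"
        return value

    print(json.dumps({"fill_value": encode_non_finite(float("nan"))}))   # {"fill_value": "NaN"}
    print(json.dumps({"fill_value": encode_non_finite(float("-inf"))}))  # {"fill_value": "-Infinity"}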

docs/user-guide/v3_migration.rst

Lines changed: 2 additions & 0 deletions

@@ -117,6 +117,8 @@ The Group class

 - Use :func:`zarr.Group.create_array` in place of :func:`zarr.Group.create_dataset`
 - Use :func:`zarr.Group.require_array` in place of :func:`zarr.Group.require_dataset`
+3. Disallow "." syntax for getting group members. To get a member of a group named ``foo``,
+   use ``group["foo"]`` in place of ``group.foo``.

 The Store class
 ~~~~~~~~~~~~~~~
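
For readers following the added migration note, a minimal sketch of the bracket-style access it describes. The group and member names here are made up, and the in-memory group from zarr.group() is just one convenient way to get a Group object.

    import zarr

    group = zarr.group()  # hypothetical in-memory group; any store works the same way
    group.create_array("foo", shape=(10,), dtype="uint8")

    member = group["foo"]   # 3.0: get members with item access
    # member = group.foo    # removed in 3.0; attribute access no longer resolves members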

pyproject.toml

Lines changed: 14 additions & 19 deletions

@@ -73,18 +73,19 @@ gpu = [
 test = [
     "coverage",
     "pytest",
+    "pytest-asyncio",
     "pytest-cov",
+    "pytest-accept",
+    "rich",
+    "mypy",
+    "hypothesis",
+]
+remote_tests = [
     'zarr[remote]',
     "botocore",
     "s3fs",
     "moto[s3,server]",
-    "pytest-asyncio",
-    "pytest-accept",
     "requests",
-    "rich",
-    "mypy",
-    "hypothesis",
-    "universal-pathlib",
 ]
 optional = ["rich", "universal-pathlib"]
 docs = [
@@ -143,28 +144,21 @@ hooks.vcs.version-file = "src/zarr/_version.py"
 [tool.hatch.envs.test]
 dependencies = [
     "numpy~={matrix:numpy}",
-    "universal_pathlib",
 ]
 features = ["test"]

 [[tool.hatch.envs.test.matrix]]
 python = ["3.11", "3.12", "3.13"]
 numpy = ["1.25", "2.1"]
-version = ["minimal"]
-
-[[tool.hatch.envs.test.matrix]]
-python = ["3.11", "3.12", "3.13"]
-numpy = ["1.25", "2.1"]
-features = ["optional"]
+deps = ["minimal", "optional"]

-[[tool.hatch.envs.test.matrix]]
-python = ["3.11", "3.12", "3.13"]
-numpy = ["1.25", "2.1"]
-features = ["gpu"]
+[tool.hatch.envs.test.overrides]
+matrix.deps.dependencies = [
+    {value = "zarr[remote, remote_tests, test, optional]", if = ["optional"]}
+]

 [tool.hatch.envs.test.scripts]
 run-coverage = "pytest --cov-config=pyproject.toml --cov=pkg --cov-report xml --cov=src --junitxml=junit.xml -o junit_family=legacy"
-run-coverage-gpu = "pip install cupy-cuda12x && pytest -m gpu --cov-config=pyproject.toml --cov=pkg --cov-report xml --cov=src --junitxml=junit.xml -o junit_family=legacy"
 run-coverage-html = "pytest --cov-config=pyproject.toml --cov=pkg --cov-report html --cov=src"
 run = "run-coverage --no-cov"
 run-pytest = "run"
@@ -174,7 +168,7 @@ run-hypothesis = "run-coverage --hypothesis-profile ci --run-slow-hypothesis tes
 list-env = "pip list"

 [tool.hatch.envs.doctest]
-features = ["test", "optional", "remote"]
+features = ["test", "optional", "remote", "remote_tests"]
 description = "Test environment for doctests"

 [tool.hatch.envs.doctest.scripts]
@@ -255,6 +249,7 @@ dependencies = [
     'universal_pathlib==0.0.22',
     'typing_extensions==4.9.*',
     'donfig==0.8.*',
+    'obstore==0.5.*',
     # test deps
     'zarr[test]',
 ]

src/zarr/codecs/bytes.py

Lines changed: 1 addition & 1 deletion

@@ -114,7 +114,7 @@ async def _encode_single(

         nd_array = chunk_array.as_ndarray_like()
         # Flatten the nd-array (only copy if needed) and reinterpret as bytes
-        nd_array = nd_array.ravel().view(dtype="b")
+        nd_array = nd_array.ravel().view(dtype="B")
         return chunk_spec.prototype.buffer.from_array_like(nd_array)

     def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
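
The dtype change above (mirrored in crc32c_.py and the buffer classes below) swaps NumPy's signed byte "b" (int8) for the unsigned byte "B" (uint8). A standalone sketch of the difference, independent of zarr:

    import numpy as np

    data = np.array([0, 127, 200, 255], dtype=np.uint8)

    # "b" is int8: byte values >= 128 reinterpret as negative numbers.
    print(data.view("b"))  # [   0  127  -56   -1]

    # "B" is uint8: the raw byte values are preserved.
    print(data.view("B"))  # [  0 127 200 255]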

src/zarr/codecs/crc32c_.py

Lines changed: 1 addition & 1 deletion

@@ -57,7 +57,7 @@ async def _encode_single(
         # Calculate the checksum and "cast" it to a numpy array
         checksum = np.array([crc32c(cast(typing_extensions.Buffer, data))], dtype=np.uint32)
         # Append the checksum (as bytes) to the data
-        return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("b")))
+        return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("B")))

     def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
         return input_byte_length + 4
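
A minimal sketch of the checksum layout the line above produces, assuming the crc32c package that provides the crc32c() function used by the codec (the payload contents are made up):

    import numpy as np
    from crc32c import crc32c

    payload = np.frombuffer(b"some chunk bytes", dtype="B")

    # crc32c() returns an unsigned 32-bit checksum of the payload.
    checksum = np.array([crc32c(payload.tobytes())], dtype=np.uint32)

    # Viewing the checksum as "B" yields its 4 raw bytes, appended after the payload,
    # which is why the encoded size is input_byte_length + 4.
    encoded = np.append(payload, checksum.view("B"))
    assert encoded.nbytes == payload.nbytes + 4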

src/zarr/core/buffer/core.py

Lines changed: 2 additions & 2 deletions

@@ -143,7 +143,7 @@ class Buffer(ABC):
     def __init__(self, array_like: ArrayLike) -> None:
         if array_like.ndim != 1:
             raise ValueError("array_like: only 1-dim allowed")
-        if array_like.dtype != np.dtype("b"):
+        if array_like.dtype != np.dtype("B"):
             raise ValueError("array_like: only byte dtype allowed")
         self._data = array_like

@@ -306,7 +306,7 @@ class NDBuffer:
     Notes
     -----
     The two buffer classes Buffer and NDBuffer are very similar. In fact, Buffer
-    is a special case of NDBuffer where dim=1, stride=1, and dtype="b". However,
+    is a special case of NDBuffer where dim=1, stride=1, and dtype="B". However,
     in order to use Python's type system to differentiate between the contiguous
     Buffer and the n-dim (non-contiguous) NDBuffer, we keep the definition of the
     two classes separate.

src/zarr/core/buffer/cpu.py

Lines changed: 4 additions & 4 deletions

@@ -49,7 +49,7 @@ def __init__(self, array_like: ArrayLike) -> None:

     @classmethod
     def create_zero_length(cls) -> Self:
-        return cls(np.array([], dtype="b"))
+        return cls(np.array([], dtype="B"))

     @classmethod
     def from_buffer(cls, buffer: core.Buffer) -> Self:
@@ -92,7 +92,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
         -------
         New buffer representing `bytes_like`
         """
-        return cls.from_array_like(np.frombuffer(bytes_like, dtype="b"))
+        return cls.from_array_like(np.frombuffer(bytes_like, dtype="B"))

     def as_numpy_array(self) -> npt.NDArray[Any]:
         """Returns the buffer as a NumPy array (host memory).
@@ -111,7 +111,7 @@ def __add__(self, other: core.Buffer) -> Self:
         """Concatenate two buffers"""

         other_array = other.as_array_like()
-        assert other_array.dtype == np.dtype("b")
+        assert other_array.dtype == np.dtype("B")
         return self.__class__(
             np.concatenate((np.asanyarray(self._data), np.asanyarray(other_array)))
         )
@@ -131,7 +131,7 @@ class NDBuffer(core.NDBuffer):
     Notes
     -----
     The two buffer classes Buffer and NDBuffer are very similar. In fact, Buffer
-    is a special case of NDBuffer where dim=1, stride=1, and dtype="b". However,
+    is a special case of NDBuffer where dim=1, stride=1, and dtype="B". However,
     in order to use Python's type system to differentiate between the contiguous
     Buffer and the n-dim (non-contiguous) NDBuffer, we keep the definition of the
     two classes separate.
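
A small usage sketch of the cpu.Buffer methods touched above, using zarr's internal zarr.core.buffer.cpu module purely for illustration (the byte strings are arbitrary; this is not public API):

    from zarr.core.buffer import cpu

    buf = cpu.Buffer.from_bytes(b"abc")            # backed by a 1-D uint8 ("B") array
    combined = buf + cpu.Buffer.from_bytes(b"de")  # __add__ concatenates the byte arrays
    print(combined.as_numpy_array().tobytes())     # b'abcde'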

src/zarr/core/buffer/gpu.py

Lines changed: 5 additions & 5 deletions

@@ -59,7 +59,7 @@ def __init__(self, array_like: ArrayLike) -> None:

         if array_like.ndim != 1:
             raise ValueError("array_like: only 1-dim allowed")
-        if array_like.dtype != np.dtype("b"):
+        if array_like.dtype != np.dtype("B"):
             raise ValueError("array_like: only byte dtype allowed")

         if not hasattr(array_like, "__cuda_array_interface__"):
@@ -84,7 +84,7 @@ def create_zero_length(cls) -> Self:
         -------
         New empty 0-length buffer
         """
-        return cls(cp.array([], dtype="b"))
+        return cls(cp.array([], dtype="B"))

     @classmethod
     def from_buffer(cls, buffer: core.Buffer) -> Self:
@@ -100,14 +100,14 @@ def from_buffer(cls, buffer: core.Buffer) -> Self:

     @classmethod
     def from_bytes(cls, bytes_like: BytesLike) -> Self:
-        return cls.from_array_like(cp.frombuffer(bytes_like, dtype="b"))
+        return cls.from_array_like(cp.frombuffer(bytes_like, dtype="B"))

     def as_numpy_array(self) -> npt.NDArray[Any]:
         return cast(npt.NDArray[Any], cp.asnumpy(self._data))

     def __add__(self, other: core.Buffer) -> Self:
         other_array = other.as_array_like()
-        assert other_array.dtype == np.dtype("b")
+        assert other_array.dtype == np.dtype("B")
         gpu_other = Buffer(other_array)
         gpu_other_array = gpu_other.as_array_like()
         return self.__class__(
@@ -129,7 +129,7 @@ class NDBuffer(core.NDBuffer):
     Notes
     -----
     The two buffer classes Buffer and NDBuffer are very similar. In fact, Buffer
-    is a special case of NDBuffer where dim=1, stride=1, and dtype="b". However,
+    is a special case of NDBuffer where dim=1, stride=1, and dtype="B". However,
     in order to use Python's type system to differentiate between the contiguous
     Buffer and the n-dim (non-contiguous) NDBuffer, we keep the definition of the
     two classes separate.
