11 changes: 11 additions & 0 deletions pyproject.toml
@@ -311,6 +311,8 @@ extend-select = [
     "PERF", # Perflint
     "PIE", # flake8-pie
     "PGH", # pygrep-hooks
+    "PLE", # Pylint Error
+    "PLR", # Pylint Refactor
     "PT", # flake8-pytest-style
     "PYI", # flake8-pyi
     "RET", # flake8-return
@@ -325,6 +327,15 @@ extend-select = [
 ]
 ignore = [
     "ANN401",
+    "PLR0124",
+    "PLR0904",
+    "PLR0911",
+    "PLR0912",
+    "PLR0913",
+    "PLR0914",
+    "PLR0915",
+    "PLR0917",
+    "PLR2004",
     "PT011", # TODO: apply this rule
     "RET505",
     "RET506",
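Most of the code changes below collapse an `if` that is the only statement of an `else` block into an `elif` — the pattern Ruff's Pylint Refactor set reports as collapsible-else-if (PLR5501). A minimal before/after sketch of that shape, not taken from the zarr codebase:

def describe(n: int) -> str:
    # Flagged shape: the `else` body is nothing but another `if`/`else`.
    if n < 0:
        return "negative"
    else:
        if n == 0:
            return "zero"
        else:
            return "positive"

def describe_collapsed(n: int) -> str:
    # Preferred shape: the nested `if` folds into `elif`.
    if n < 0:
        return "negative"
    elif n == 0:
        return "zero"
    else:
        return "positive"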
11 changes: 5 additions & 6 deletions src/zarr/core/codec_pipeline.py
@@ -410,13 +410,12 @@ async def _read_key(
             ):
                 if chunk_array is None:
                     chunk_array_batch.append(None)  # type: ignore[unreachable]
+                elif not chunk_spec.config.write_empty_chunks and chunk_array.all_equal(
+                    fill_value_or_default(chunk_spec)
+                ):
+                    chunk_array_batch.append(None)
                 else:
-                    if not chunk_spec.config.write_empty_chunks and chunk_array.all_equal(
-                        fill_value_or_default(chunk_spec)
-                    ):
-                        chunk_array_batch.append(None)
-                    else:
-                        chunk_array_batch.append(chunk_array)
+                    chunk_array_batch.append(chunk_array)

             chunk_bytes_batch = await self.encode_batch(
                 [
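The refactor keeps the original behaviour: when write_empty_chunks is disabled and the chunk equals the fill value everywhere, None is appended so that chunk is never encoded or written. A rough stand-alone restatement of that predicate with plain NumPy (helper name hypothetical, not the zarr API):

import numpy as np

def should_drop_chunk(chunk: np.ndarray, fill_value: float, write_empty_chunks: bool) -> bool:
    # An all-fill-value chunk is skipped unless empty chunks were requested.
    return not write_empty_chunks and bool(np.all(chunk == fill_value))

assert should_drop_chunk(np.zeros((4, 4)), 0.0, write_empty_chunks=False)
assert not should_drop_chunk(np.zeros((4, 4)), 0.0, write_empty_chunks=True)
assert not should_drop_chunk(np.arange(4, dtype=float), 0.0, write_empty_chunks=False)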
87 changes: 41 additions & 46 deletions src/zarr/core/group.py
@@ -319,19 +319,18 @@ def flatten(
             children: dict[str, ArrayV2Metadata | ArrayV3Metadata | GroupMetadata] = {}
             if isinstance(group, ArrayV2Metadata | ArrayV3Metadata):
                 children[key] = group
+            elif group.consolidated_metadata and group.consolidated_metadata.metadata is not None:
+                children[key] = replace(
+                    group, consolidated_metadata=ConsolidatedMetadata(metadata={})
+                )
+                for name, val in group.consolidated_metadata.metadata.items():
+                    full_key = f"{key}/{name}"
+                    if isinstance(val, GroupMetadata):
+                        children.update(flatten(full_key, val))
+                    else:
+                        children[full_key] = val
             else:
-                if group.consolidated_metadata and group.consolidated_metadata.metadata is not None:
-                    children[key] = replace(
-                        group, consolidated_metadata=ConsolidatedMetadata(metadata={})
-                    )
-                    for name, val in group.consolidated_metadata.metadata.items():
-                        full_key = f"{key}/{name}"
-                        if isinstance(val, GroupMetadata):
-                            children.update(flatten(full_key, val))
-                        else:
-                            children[full_key] = val
-                else:
-                    children[key] = replace(group, consolidated_metadata=None)
+                children[key] = replace(group, consolidated_metadata=None)
             return children

         for k, v in self.metadata.items():
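For orientation: `flatten` rewrites nested consolidated metadata as a flat mapping keyed by slash-separated paths, recursing into child groups. A toy version of the same idea over plain dicts (not the zarr API):

def flatten_paths(key: str, node: dict) -> dict[str, dict]:
    # Emit each node under its full path, then recurse into children with
    # "parent/child" keys, mirroring full_key in the hunk above.
    out = {key: {k: v for k, v in node.items() if k != "children"}}
    for name, child in node.get("children", {}).items():
        out.update(flatten_paths(f"{key}/{name}", child))
    return out

tree = {"attrs": 1, "children": {"a": {"attrs": 2, "children": {"b": {"attrs": 3}}}}}
assert sorted(flatten_paths("root", tree)) == ["root", "root/a", "root/a/b"]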
@@ -1275,9 +1274,8 @@ async def require_array(
             if exact:
                 if ds.dtype != dtype:
                     raise TypeError(f"Incompatible dtype ({ds.dtype} vs {dtype})")
-            else:
-                if not np.can_cast(ds.dtype, dtype):
-                    raise TypeError(f"Incompatible dtype ({ds.dtype} vs {dtype})")
+            elif not np.can_cast(ds.dtype, dtype):
+                raise TypeError(f"Incompatible dtype ({ds.dtype} vs {dtype})")
         except KeyError:
             ds = await self.create_array(name, shape=shape, dtype=dtype, **kwargs)
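The collapsed branch preserves both dtype checks: with exact=True the existing array's dtype must match the requested one exactly, otherwise a safe NumPy cast is accepted. For example:

import numpy as np

assert np.can_cast(np.int32, np.int64)         # non-exact: safe widening is accepted
assert not np.can_cast(np.float64, np.int32)   # lossy: rejected, so TypeError is raised
assert np.dtype("int32") != np.dtype("int64")  # exact: only the identical dtype passes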

@@ -3277,24 +3275,23 @@ async def create_hierarchy(
                 else:
                     # Any other exception is a real error
                     raise extant_node
-            else:
-                # this is a node that already exists, but a node with the same key was specified
-                # in nodes_parsed.
-                if isinstance(extant_node, GroupMetadata):
-                    # a group already exists where we want to create a group
-                    if isinstance(proposed_node, ImplicitGroupMarker):
-                        # we have proposed an implicit group, which is OK -- we will just skip
-                        # creating this particular metadata document
-                        redundant_implicit_groups.append(key)
-                    else:
-                        # we have proposed an explicit group, which is an error, given that a
-                        # group already exists.
-                        msg = f"A group exists in store {store!r} at path {key!r}."
-                        raise ContainsGroupError(msg)
-                elif isinstance(extant_node, ArrayV2Metadata | ArrayV3Metadata):
-                    # we are trying to overwrite an existing array. this is an error.
-                    msg = f"An array exists in store {store!r} at path {key!r}."
-                    raise ContainsArrayError(msg)
+            # this is a node that already exists, but a node with the same key was specified
+            # in nodes_parsed.
+            elif isinstance(extant_node, GroupMetadata):
+                # a group already exists where we want to create a group
+                if isinstance(proposed_node, ImplicitGroupMarker):
+                    # we have proposed an implicit group, which is OK -- we will just skip
+                    # creating this particular metadata document
+                    redundant_implicit_groups.append(key)
+                else:
+                    # we have proposed an explicit group, which is an error, given that a
+                    # group already exists.
+                    msg = f"A group exists in store {store!r} at path {key!r}."
+                    raise ContainsGroupError(msg)
+            elif isinstance(extant_node, ArrayV2Metadata | ArrayV3Metadata):
+                # we are trying to overwrite an existing array. this is an error.
+                msg = f"An array exists in store {store!r} at path {key!r}."
+                raise ContainsArrayError(msg)

     nodes_explicit: dict[str, GroupMetadata | ArrayV2Metadata | ArrayV3Metadata] = {}
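The branch above decides what to do when a key already holds a node in the store: an existing group is skipped if only an implicit group was proposed, an explicit group proposal raises ContainsGroupError, and any existing array raises ContainsArrayError. A toy restatement of that decision table (strings stand in for the real metadata objects):

def resolve(existing: str, proposed: str) -> str:
    if existing == "group":
        return "skip" if proposed == "implicit group" else "ContainsGroupError"
    if existing == "array":
        return "ContainsArrayError"
    return "create"

assert resolve("group", "implicit group") == "skip"
assert resolve("group", "group") == "ContainsGroupError"
assert resolve("array", "array") == "ContainsArrayError"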

@@ -3440,7 +3437,7 @@ def _parse_hierarchy_dict(
     # but not if an empty dict was provided, because any empty hierarchy has no nodes
     if len(data_normed_keys) > 0 and "" not in data_normed_keys:
         z_format = next(iter(data_normed_keys.values())).zarr_format
-        data_normed_keys = data_normed_keys | {"": ImplicitGroupMarker(zarr_format=z_format)}
+        data_normed_keys |= {"": ImplicitGroupMarker(zarr_format=z_format)}

     out: dict[str, GroupMetadata | ArrayV2Metadata | ArrayV3Metadata] = {**data_normed_keys}
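The only change in this hunk swaps the dict union for its augmented form; for dicts, `d |= other` (Python 3.9+) updates `d` in place and leaves the same mapping as `d = d | other`:

nodes = {"": "implicit root"}
nodes |= {"a/b": "array"}
assert nodes == {"": "implicit root", "a/b": "array"}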

@@ -3454,13 +3451,12 @@
             # If a component is not already in the output dict, add ImplicitGroupMetadata
             if subpath not in out:
                 out[subpath] = ImplicitGroupMarker(zarr_format=v.zarr_format)
-            else:
-                if not isinstance(out[subpath], GroupMetadata | ImplicitGroupMarker):
-                    msg = (
-                        f"The node at {subpath} contains other nodes, but it is not a Zarr group. "
-                        "This is invalid. Only Zarr groups can contain other nodes."
-                    )
-                    raise ValueError(msg)
+            elif not isinstance(out[subpath], GroupMetadata | ImplicitGroupMarker):
+                msg = (
+                    f"The node at {subpath} contains other nodes, but it is not a Zarr group. "
+                    "This is invalid. Only Zarr groups can contain other nodes."
+                )
+                raise ValueError(msg)
     return out
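This loop walks every ancestor path of each node, inserting implicit groups for missing parents and rejecting a parent that exists but is not a group. The ancestor enumeration itself looks roughly like this (hypothetical helper):

def ancestor_paths(path: str) -> list[str]:
    # "a/b/c" has ancestors "a" and "a/b"; the root ("") is handled separately above.
    parts = path.strip("/").split("/")
    return ["/".join(parts[:i]) for i in range(1, len(parts))]

assert ancestor_paths("a/b/c") == ["a", "a/b"]
assert ancestor_paths("top") == []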


@@ -3668,12 +3664,11 @@ async def _read_metadata_v2(store: Store, path: str) -> ArrayV2Metadata | GroupM
     # return the array metadata.
     if zarray_bytes is not None:
         zmeta = json.loads(zarray_bytes.to_bytes())
+    elif zgroup_bytes is None:
+        # neither .zarray or .zgroup were found results in KeyError
+        raise FileNotFoundError(path)
     else:
-        if zgroup_bytes is None:
-            # neither .zarray or .zgroup were found results in KeyError
-            raise FileNotFoundError(path)
-        else:
-            zmeta = json.loads(zgroup_bytes.to_bytes())
+        zmeta = json.loads(zgroup_bytes.to_bytes())

     return _build_metadata_v2(zmeta, zattrs)

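The last hunk applies the same collapse to the Zarr v2 metadata fallback: prefer .zarray when present, otherwise .zgroup, and raise FileNotFoundError when neither document exists. A self-contained sketch of that lookup order over an in-memory mapping (helper and "store" are hypothetical):

import json

def read_v2_meta(docs: dict[str, bytes], path: str) -> dict:
    zarray = docs.get(f"{path}/.zarray")
    if zarray is not None:
        return json.loads(zarray)
    zgroup = docs.get(f"{path}/.zgroup")
    if zgroup is None:
        raise FileNotFoundError(path)
    return json.loads(zgroup)

assert read_v2_meta({"g/.zgroup": b'{"zarr_format": 2}'}, "g") == {"zarr_format": 2}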
35 changes: 15 additions & 20 deletions tests/test_group.py
@@ -372,16 +372,13 @@ def test_group_getitem(store: Store, zarr_format: ZarrFormat, consolidated: bool
                 group = zarr.api.synchronous.consolidate_metadata(
                     store=store, zarr_format=zarr_format
                 )
-        else:
-            if isinstance(store, ZipStore):
-                with pytest.warns(UserWarning, match="Duplicate name: "):
-                    group = zarr.api.synchronous.consolidate_metadata(
-                        store=store, zarr_format=zarr_format
-                    )
-            else:
+        elif isinstance(store, ZipStore):
+            with pytest.warns(UserWarning, match="Duplicate name: "):
                 group = zarr.api.synchronous.consolidate_metadata(
                     store=store, zarr_format=zarr_format
                 )
+        else:
+            group = zarr.api.synchronous.consolidate_metadata(store=store, zarr_format=zarr_format)
         # we're going to assume that `group.metadata` is correct, and reuse that to focus
         # on indexing in this test. Other tests verify the correctness of group.metadata
         object.__setattr__(
@@ -581,12 +578,11 @@ def test_group_child_iterators(store: Store, zarr_format: ZarrFormat, consolidat
                 group = zarr.consolidate_metadata(store)
         else:
             group = zarr.consolidate_metadata(store)
-    else:
-        if isinstance(store, ZipStore):
-            with pytest.warns(UserWarning, match="Duplicate name: "):
-                group = zarr.consolidate_metadata(store)
-        else:
+    elif isinstance(store, ZipStore):
+        with pytest.warns(UserWarning, match="Duplicate name: "):
             group = zarr.consolidate_metadata(store)
+    else:
+        group = zarr.consolidate_metadata(store)
     if zarr_format == 2:
         metadata = {
             "subarray": {
@@ -1443,15 +1439,14 @@ async def test_members_name(store: Store, consolidate: bool, zarr_format: ZarrFo
                group = zarr.api.synchronous.consolidate_metadata(store)
        else:
            group = zarr.api.synchronous.consolidate_metadata(store)
-    else:
-        if zarr_format == 3:
-            with pytest.warns(
-                ZarrUserWarning,
-                match="Consolidated metadata is currently not part in the Zarr format 3 specification.",
-            ):
-                group = zarr.api.synchronous.consolidate_metadata(store)
-        else:
+    elif zarr_format == 3:
+        with pytest.warns(
+            ZarrUserWarning,
+            match="Consolidated metadata is currently not part in the Zarr format 3 specification.",
+        ):
            group = zarr.api.synchronous.consolidate_metadata(store)
+    else:
+        group = zarr.api.synchronous.consolidate_metadata(store)
    result = group["a"]["b"]
    assert result.name == "/a/b"

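The test changes are the same mechanical collapse; the pytest.warns(..., match=...) contexts they preserve treat match as a regular expression searched against the warning message, so a message prefix is enough. A minimal self-contained example:

import warnings

import pytest

def _emit() -> None:
    warnings.warn("Duplicate name: foo", UserWarning, stacklevel=2)

def test_duplicate_name_warning() -> None:
    with pytest.warns(UserWarning, match="Duplicate name: "):
        _emit()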