
Commit cf0615b

update from_dict

1 parent 6125c1b · commit cf0615b

File tree

  • src/zarr/core/metadata

1 file changed (+17 −21 lines)


src/zarr/core/metadata/v3.py

Lines changed: 17 additions & 21 deletions
@@ -56,11 +56,13 @@ def parse_codecs(data: object) -> tuple[Codec, ...]:
 
     return out
 
+
 def parse_dimension_names(data: DimensionNames) -> tuple[str | None, ...] | None:
     if data is None:
         return None
     return tuple(data)
 
+
 def parse_storage_transformers(data: object) -> tuple[dict[str, JSON], ...]:
     """
     Parse storage_transformers. Zarr python cannot use storage transformers
@@ -271,44 +273,38 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
             )
         }
 
+    # this type annotation violates liskov but that's a problem we need to fix at the base class
     @classmethod
-    def from_dict(cls, data: dict[str, JSON]) -> Self:
-        # type check the dict
-        tycheck = check_type(data, ArrayMetadataJSON_V3)
-        if not tycheck.success:
-            raise ValueError(f"Invalid metadata: {data!r}. {tycheck.errors}")
-
-        # make a copy because we are modifying the dict
-
-        _data = data.copy()
-
-        data_type_json = _data.pop("data_type")
+    def from_dict(cls, data: ArrayMetadataJSON_V3) -> Self:  # type: ignore[override]
+        data_type_json = data["data_type"]
         data_type = get_data_type_from_json(data_type_json, zarr_format=3)
 
         # check that the fill value is consistent with the data type
+        fill = data["fill_value"]
         try:
-            fill = _data["fill_value"]
-            fill_value_parsed = data_type.from_json_scalar(fill, zarr_format=3)
-
+            fill_value_parsed = data_type.from_json_scalar(fill, zarr_format=3)  # type: ignore[arg-type]
         except ValueError as e:
             raise TypeError(f"Invalid fill_value: {fill!r}") from e
 
         # dimension_names key is optional, normalize missing to `None`
-        dimension_names = _data.pop("dimension_names", None)
+        dimension_names = data.get("dimension_names", None)
 
         # attributes key is optional, normalize missing to `None`
-        attributes = _data.pop("attributes", None)
+        attributes = data.get("attributes", None)
+
+        # storage transformers key is optional, normalize missing to `None`
+        storage_transformers = data.get("storage_transformers", None)
 
         return cls(
             shape=data["shape"],
-            chunk_grid=data["chunk_grid"],
-            chunk_key_encoding=data["chunk_key_encoding"],
-            codecs=data["codecs"],
-            attributes=attributes,
+            chunk_grid=data["chunk_grid"],  # type: ignore[arg-type]
+            chunk_key_encoding=data["chunk_key_encoding"],  # type: ignore[arg-type]
+            codecs=data["codecs"],  # type: ignore[arg-type]
+            attributes=attributes,  # type: ignore[arg-type]
             data_type=data_type,
             fill_value=fill_value_parsed,
             dimension_names=dimension_names,
-            storage_transformers=_data.get("storage_transformers", None),
+            storage_transformers=storage_transformers,  # type: ignore[arg-type]
         )  # type: ignore[arg-type]
 
     def to_dict(self) -> dict[str, JSON]:
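
For context, a minimal sketch of how the revised from_dict might be exercised. It assumes the enclosing class is ArrayV3Metadata from zarr.core.metadata.v3 and that the input dict follows the Zarr v3 array metadata layout; the concrete values below (chunk grid, codecs, fill value, and so on) are illustrative and not taken from this commit.

# Sketch only: pass a plain Zarr v3 metadata document to the updated from_dict.
# The required keys are assumed to be those of the ArrayMetadataJSON_V3 TypedDict;
# optional keys are normalized to None via .get(...), as shown in the diff above.
from zarr.core.metadata.v3 import ArrayV3Metadata

metadata_doc = {
    "zarr_format": 3,
    "node_type": "array",
    "shape": [100, 100],
    "data_type": "float64",
    "chunk_grid": {"name": "regular", "configuration": {"chunk_shape": [10, 10]}},
    "chunk_key_encoding": {"name": "default", "configuration": {"separator": "/"}},
    "codecs": [{"name": "bytes", "configuration": {"endian": "little"}}],
    "fill_value": 0.0,
    # "dimension_names", "attributes", and "storage_transformers" are optional.
}

meta = ArrayV3Metadata.from_dict(metadata_doc)
print(meta.data_type, meta.fill_value, meta.dimension_names)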
