
Commit 41724fb

Apply ruff/flake8-simplify rule SIM118
SIM118 Use `key in dict` instead of `key in dict.keys()`
Parent: 8fc5bde · Commit: 41724fb
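For context, SIM118 targets code that iterates over or tests membership in `dict.keys()` when the dict itself already supports both operations. A minimal before/after sketch (the `settings` dict is illustrative, not taken from the Zarr codebase):

    settings = {"buffer": "default", "ndbuffer": "default"}

    # Flagged by SIM118: `.keys()` creates a view object that adds nothing here
    for key in settings.keys():
        print(key)
    assert "buffer" in settings.keys()

    # Preferred: dicts iterate over their keys and support `in` directly
    for key in settings:
        print(key)
    assert "buffer" in settings

Both forms are equivalent for iteration and membership tests; `.keys()` remains useful only when a set-like view is needed, e.g. `settings.keys() & other_dict.keys()`.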

12 files changed: 169 additions & 205 deletions


src/zarr/codecs/sharding.py

Lines changed: 6 additions & 6 deletions
@@ -80,9 +80,9 @@ async def get(
         self, prototype: BufferPrototype, byte_range: ByteRangeRequest | None = None
     ) -> Buffer | None:
         assert byte_range is None, "byte_range is not supported within shards"
-        assert (
-            prototype == default_buffer_prototype()
-        ), f"prototype is not supported within shards currently. diff: {prototype} != {default_buffer_prototype()}"
+        assert prototype == default_buffer_prototype(), (
+            f"prototype is not supported within shards currently. diff: {prototype} != {default_buffer_prototype()}"
+        )
         return self.shard_dict.get(self.chunk_coords)


@@ -152,9 +152,9 @@ def is_dense(self, chunk_byte_length: int) -> bool:
         )

         # Are all non-empty offsets unique?
-        if len(
-            {offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64}
-        ) != len(sorted_offsets_and_lengths):
+        if len({
+            offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64
+        }) != len(sorted_offsets_and_lengths):
             return False

         return all(

src/zarr/core/array.py

Lines changed: 6 additions & 8 deletions
@@ -1048,14 +1048,12 @@ async def _save_metadata(self, metadata: ArrayMetadata, ensure_parents: bool = F
             parents = _build_parents(self)

             for parent in parents:
-                awaitables.extend(
-                    [
-                        (parent.store_path / key).set_if_not_exists(value)
-                        for key, value in parent.metadata.to_buffer_dict(
-                            default_buffer_prototype()
-                        ).items()
-                    ]
-                )
+                awaitables.extend([
+                    (parent.store_path / key).set_if_not_exists(value)
+                    for key, value in parent.metadata.to_buffer_dict(
+                        default_buffer_prototype()
+                    ).items()
+                ])

         await gather(*awaitables)

src/zarr/core/codec_pipeline.py

Lines changed: 4 additions & 6 deletions
@@ -235,12 +235,10 @@ async def read_batch(
         drop_axes: tuple[int, ...] = (),
     ) -> None:
         if self.supports_partial_decode:
-            chunk_array_batch = await self.decode_partial_batch(
-                [
-                    (byte_getter, chunk_selection, chunk_spec)
-                    for byte_getter, chunk_spec, chunk_selection, _ in batch_info
-                ]
-            )
+            chunk_array_batch = await self.decode_partial_batch([
+                (byte_getter, chunk_selection, chunk_spec)
+                for byte_getter, chunk_spec, chunk_selection, _ in batch_info
+            ])
             for chunk_array, (_, chunk_spec, _, out_selection) in zip(
                 chunk_array_batch, batch_info, strict=False
             ):

src/zarr/core/group.py

Lines changed: 10 additions & 12 deletions
@@ -463,9 +463,9 @@ async def open(
         if use_consolidated or use_consolidated is None:
             paths.append(store_path / consolidated_key)

-        zgroup_bytes, zattrs_bytes, *rest = await asyncio.gather(
-            *[path.get() for path in paths]
-        )
+        zgroup_bytes, zattrs_bytes, *rest = await asyncio.gather(*[
+            path.get() for path in paths
+        ])
         if zgroup_bytes is None:
             raise FileNotFoundError(store_path)

@@ -768,14 +768,12 @@ async def _save_metadata(self, ensure_parents: bool = False) -> None:
         if ensure_parents:
             parents = _build_parents(self)
             for parent in parents:
-                awaitables.extend(
-                    [
-                        (parent.store_path / key).set_if_not_exists(value)
-                        for key, value in parent.metadata.to_buffer_dict(
-                            default_buffer_prototype()
-                        ).items()
-                    ]
-                )
+                awaitables.extend([
+                    (parent.store_path / key).set_if_not_exists(value)
+                    for key, value in parent.metadata.to_buffer_dict(
+                        default_buffer_prototype()
+                    ).items()
+                ])

         await asyncio.gather(*awaitables)

@@ -1225,7 +1223,7 @@ def _members_consolidated(

         # we kind of just want the top-level keys.
         if consolidated_metadata is not None:
-            for key in consolidated_metadata.metadata.keys():
+            for key in consolidated_metadata.metadata:
                 obj = self._getitem_consolidated(
                     self.store_path, key, prefix=self.name
                 )  # Metadata -> Group/Array

src/zarr/core/indexing.py

Lines changed: 3 additions & 3 deletions
@@ -289,9 +289,9 @@ def is_pure_orthogonal_indexing(selection: Selection, ndim: int) -> TypeGuard[Or
 def get_chunk_shape(chunk_grid: ChunkGrid) -> ChunkCoords:
     from zarr.core.chunk_grids import RegularChunkGrid

-    assert isinstance(
-        chunk_grid, RegularChunkGrid
-    ), "Only regular chunk grid is supported, currently."
+    assert isinstance(chunk_grid, RegularChunkGrid), (
+        "Only regular chunk grid is supported, currently."
+    )
     return chunk_grid.chunk_shape


src/zarr/core/metadata/v3.py

Lines changed: 3 additions & 3 deletions
@@ -299,9 +299,9 @@ def ndim(self) -> int:
     def get_chunk_spec(
         self, _chunk_coords: ChunkCoords, order: MemoryOrder, prototype: BufferPrototype
     ) -> ArraySpec:
-        assert isinstance(
-            self.chunk_grid, RegularChunkGrid
-        ), "Currently, only regular chunk grid is supported"
+        assert isinstance(self.chunk_grid, RegularChunkGrid), (
+            "Currently, only regular chunk grid is supported"
+        )
         return ArraySpec(
             shape=self.chunk_grid.chunk_shape,
             dtype=self.dtype,

src/zarr/testing/strategies.py

Lines changed: 3 additions & 3 deletions
@@ -94,9 +94,9 @@ def np_array_and_chunks(
     # We want this strategy to shrink towards arrays with smaller number of chunks
     # 1. st.integers() shrinks towards smaller values. So we use that to generate number of chunks
     numchunks = draw(
-        st.tuples(
-            *[st.integers(min_value=0 if size == 0 else 1, max_value=size) for size in array.shape]
-        )
+        st.tuples(*[
+            st.integers(min_value=0 if size == 0 else 1, max_value=size) for size in array.shape
+        ])
     )
     # 2. and now generate the chunks tuple
     chunks = tuple(

tests/test_api.py

Lines changed: 1 addition & 1 deletion
@@ -154,7 +154,7 @@ def test_save(store: Store, n_args: int, n_kwargs: int) -> None:
     assert isinstance(group, Group)
     for array in group.array_values():
         assert_array_equal(array[:], data)
-    for k in kwargs.keys():
+    for k in kwargs:
         assert k in group
     assert group.nmembers() == n_args + n_kwargs

tests/test_codec_entrypoints.py

Lines changed: 4 additions & 6 deletions
@@ -40,11 +40,9 @@ def test_entrypoint_pipeline() -> None:
 @pytest.mark.usefixtures("set_path")
 @pytest.mark.parametrize("buffer_name", ["TestEntrypointBuffer", "TestEntrypointGroup.Buffer"])
 def test_entrypoint_buffer(buffer_name: str) -> None:
-    config.set(
-        {
-            "buffer": "package_with_entrypoint." + buffer_name,
-            "ndbuffer": "package_with_entrypoint.TestEntrypointNDBuffer",
-        }
-    )
+    config.set({
+        "buffer": "package_with_entrypoint." + buffer_name,
+        "ndbuffer": "package_with_entrypoint.TestEntrypointNDBuffer",
+    })
     assert zarr.registry.get_buffer_class().__qualname__ == buffer_name
     assert zarr.registry.get_ndbuffer_class().__name__ == "TestEntrypointNDBuffer"

tests/test_group.py

Lines changed: 5 additions & 7 deletions
@@ -541,13 +541,11 @@ def test_group_child_iterators(store: Store, zarr_format: ZarrFormat, consolidat
     object.__setattr__(
         expected_group_values[0].metadata,
         "consolidated_metadata",
-        ConsolidatedMetadata.from_dict(
-            {
-                "kind": "inline",
-                "metadata": metadata,
-                "must_understand": False,
-            }
-        ),
+        ConsolidatedMetadata.from_dict({
+            "kind": "inline",
+            "metadata": metadata,
+            "must_understand": False,
+        }),
     )
     object.__setattr__(
         expected_group_values[1].metadata,
