@@ -903,7 +903,7 @@ async def open(
        store_path = await make_store_path(store)
        metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
        # TODO: remove this cast when we have better type hints
-       _metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
+       _metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
        return cls(store_path=store_path, metadata=_metadata_dict)

    @property
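Note: the motivation for quoting the first argument to typing.cast is that a string target is never evaluated at runtime, so the named type only has to be visible to the type checker and its import can sit under if TYPE_CHECKING:. A minimal sketch of the pattern, using a hypothetical import path rather than zarr's real one:

from typing import TYPE_CHECKING, Any, cast

if TYPE_CHECKING:
    # hypothetical import path, shown only to illustrate the pattern
    from mypackage.metadata import ArrayV3MetadataDict

def normalize(metadata_dict: dict[str, Any]) -> "ArrayV3MetadataDict":
    # the quoted form is just a string at runtime, so the import above never executes
    return cast("ArrayV3MetadataDict", metadata_dict)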
@@ -1399,7 +1399,7 @@ async def _set_selection(
            if isinstance(array_like, np._typing._SupportsArrayFunc):
                # TODO: need to handle array types that don't support __array_function__
                # like PyTorch and JAX
-               array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
+               array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
                value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
        else:
            if not hasattr(value, "shape"):
@@ -1413,7 +1413,7 @@ async def _set_selection(
                    value = value.astype(dtype=self.metadata.dtype, order="A")
                else:
                    value = np.array(value, dtype=self.metadata.dtype, order="A")
-       value = cast(NDArrayLike, value)
+       value = cast("NDArrayLike", value)
        # We accept any ndarray like object from the user and convert it
        # to a NDBuffer (or subclass). From this point onwards, we only pass
        # Buffer and NDBuffer between components.
@@ -2437,11 +2437,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLikeOrScalar:
        """
        fields, pure_selection = pop_fields(selection)
        if is_pure_fancy_indexing(pure_selection, self.ndim):
-           return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
+           return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
        elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
            return self.get_orthogonal_selection(pure_selection, fields=fields)
        else:
-           return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
+           return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)

    def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
        """Modify data for an item or region of the array.
@@ -2536,11 +2536,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
        """
        fields, pure_selection = pop_fields(selection)
        if is_pure_fancy_indexing(pure_selection, self.ndim):
-           self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
+           self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
        elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
            self.set_orthogonal_selection(pure_selection, value, fields=fields)
        else:
-           self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
+           self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)

    @_deprecate_positional_args
    def get_basic_selection(
@@ -3658,7 +3658,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
        # TODO: remove this cast when type inference improves
        new_array = sync(self._async_array.update_attributes(new_attributes))
        # TODO: remove this cast when type inference improves
-       _new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
+       _new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
        return type(self)(_new_array)

    def __repr__(self) -> str:
@@ -4253,7 +4253,7 @@ async def init_array(
            serializer=serializer,
            dtype=dtype_parsed,
        )
-       sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
+       sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
        codecs_out: tuple[Codec, ...]
        if shard_shape_parsed is not None:
            index_location = None
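Note: the same reasoning covers parameterized targets like tuple[Codec, ...]: with a string argument, neither Codec nor the subscripted generic needs to resolve when the line executes, while the tuple itself is still assembled by ordinary unpacking at runtime. A small sketch under that assumption (the Codec protocol below is a placeholder, not zarr's real Codec class):

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    from typing import Protocol

    class Codec(Protocol):  # placeholder, for illustration only
        def encode(self, data: bytes) -> bytes: ...

def assemble_codecs(
    array_array: tuple, array_bytes: object, bytes_bytes: tuple
) -> "tuple[Codec, ...]":
    # flatten the three pipeline stages into one tuple; the quoted cast keeps
    # Codec as a type-checking-only name
    return cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))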
@@ -4524,7 +4524,7 @@ def _parse_keep_array_attr(
            compressors = "auto"
    if serializer == "keep":
        if zarr_format == 3 and data.metadata.zarr_format == 3:
-           serializer = cast(SerializerLike, data.serializer)
+           serializer = cast("SerializerLike", data.serializer)
        else:
            serializer = "auto"
    if fill_value is None:
@@ -4702,7 +4702,7 @@ def _parse_chunk_encoding_v3(
        if isinstance(filters, dict | Codec):
            maybe_array_array = (filters,)
        else:
-           maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
+           maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
        out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)

    if serializer == "auto":
@@ -4719,7 +4719,7 @@ def _parse_chunk_encoding_v3(
        if isinstance(compressors, dict | Codec):
            maybe_bytes_bytes = (compressors,)
        else:
-           maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
+           maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)

    out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)