@@ -3851,37 +3851,48 @@ async def from_array(
     #TODO
     """

-    if chunks == "keep":
-        chunks = data.chunks
-    if zarr_format is None:
-        zarr_format = data.metadata.zarr_format
-    if filters == "keep":
-        if zarr_format == data.metadata.zarr_format:
-            filters = data.filters or None
-        else:
+    if isinstance(data, Array):
+        if chunks == "keep":
+            chunks = data.chunks
+        if zarr_format is None:
+            zarr_format = data.metadata.zarr_format
+        if filters == "keep":
+            if zarr_format == data.metadata.zarr_format:
+                filters = data.filters or None
+            else:
+                filters = "auto"
+        if compressors == "keep":
+            if zarr_format == data.metadata.zarr_format:
+                compressors = data.compressors or None
+            else:
+                compressors = "auto"
+        if serializer == "keep":
+            if zarr_format == 3:
+                serializer = cast(SerializerLike, data.serializer)
+            else:
+                serializer = "auto"
+        if fill_value is None:
+            fill_value = data.fill_value
+        if order is None:
+            order = data.order
+        if chunk_key_encoding is None and zarr_format == data.metadata.zarr_format:
+            if isinstance(data.metadata, ArrayV2Metadata):
+                chunk_key_encoding = {"name": "v2", "separator": data.metadata.dimension_separator}
+            elif isinstance(data.metadata, ArrayV3Metadata):
+                chunk_key_encoding = data.metadata.chunk_key_encoding
+        if dimension_names is None and data.metadata.zarr_format == 3:
+            dimension_names = data.metadata.dimension_names
+    else:
+        if chunks == "keep":
+            chunks = "auto"
+        if zarr_format is None:
+            zarr_format = 3
+        if filters == "keep":
             filters = "auto"
-    if compressors == "keep":
-        if zarr_format == data.metadata.zarr_format:
-            compressors = data.compressors or None
-        else:
+        if compressors == "keep":
             compressors = "auto"
-    if serializer == "keep":
-        if zarr_format == 3:
-            serializer = cast(SerializerLike, data.serializer)
-        else:
+        if serializer == "keep":
             serializer = "auto"
-    if fill_value is None:
-        fill_value = data.fill_value
-    if order is None:
-        order = data.order
-    if chunk_key_encoding is None and zarr_format == data.metadata.zarr_format:
-        if isinstance(data.metadata, ArrayV2Metadata):
-            chunk_key_encoding = {"name": "v2", "separator": data.metadata.dimension_separator}
-        elif isinstance(data.metadata, ArrayV3Metadata):
-            chunk_key_encoding = data.metadata.chunk_key_encoding
-    if dimension_names is None and data.metadata.zarr_format == 3:
-        dimension_names = data.metadata.dimension_names
-
     new_array = await create_array(
         store,
         name=name,
@@ -3902,17 +3913,29 @@ async def from_array(
         overwrite=overwrite,
         config=config,
     )
+    if isinstance(data, Array):

-    async def _copy_region(chunk_coords: ChunkCoords | slice, _data: Array) -> None:
-        arr = await _data._async_array.getitem(chunk_coords)
-        await new_array.setitem(chunk_coords, arr)
+        async def _copy_region(chunk_coords: ChunkCoords | slice, _data: Array) -> None:
+            arr = await _data._async_array.getitem(chunk_coords)
+            await new_array.setitem(chunk_coords, arr)

-    # Stream data from the source array to the new array
-    await concurrent_map(
-        [(region, data) for region in new_array._iter_chunk_regions()],
-        _copy_region,
-        zarr.core.config.config.get("async.concurrency"),
-    )
+        # Stream data from the source array to the new array
+        await concurrent_map(
+            [(region, data) for region in new_array._iter_chunk_regions()],
+            _copy_region,
+            zarr.core.config.config.get("async.concurrency"),
+        )
+    else:
+
+        async def _copy_region(chunk_coords: ChunkCoords | slice, _data: npt.ArrayLike) -> None:
+            await new_array.setitem(chunk_coords, _data[chunk_coords])
+
+        # Stream data from the source array to the new array
+        await concurrent_map(
+            [(region, data) for region in new_array._iter_chunk_regions()],
+            _copy_region,
+            zarr.core.config.config.get("async.concurrency"),
+        )
     return new_array


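For context, a minimal usage sketch of the behaviour this change enables. It assumes `zarr.from_array` is the public synchronous wrapper for the async `from_array` above, exposing the same `store`, `data`, and `chunks` parameters, and that `zarr.storage.MemoryStore` is used as the target store; the exact public signature may differ.

# Sketch only: zarr.from_array and MemoryStore usage are assumptions about the
# public API that wraps the async function shown in the diff.
import numpy as np
import zarr
from zarr.storage import MemoryStore

# Case 1: data is an existing zarr Array. Parameters left at "keep" (chunks,
# filters, compressors, serializer) are carried over from the source where the
# zarr formats match, and the data is streamed chunk region by chunk region.
src = zarr.create_array(MemoryStore(), shape=(100, 100), chunks=(10, 10), dtype="float32")
src[:] = np.random.random((100, 100))
copy = zarr.from_array(MemoryStore(), data=src)

# Case 2: data is a plain array-like. "keep" falls back to "auto", zarr_format
# defaults to 3, and each chunk region is written via setitem on the new array.
arr = zarr.from_array(MemoryStore(), data=np.arange(1000).reshape(100, 10), chunks=(10, 10))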