2323from types_aiobotocore_s3 .literals import BucketLocationConstraintType
2424from types_aiobotocore_s3 .type_defs import ObjectIdentifierTypeDef
2525
26- from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE , PRESIGNED_LINK_MAX_SIZE
26+ from ._constants import MULTIPART_COPY_THRESHOLD , MULTIPART_UPLOADS_MIN_TOTAL_SIZE
2727from ._error_handler import s3_exception_handler , s3_exception_handler_async_gen
2828from ._errors import S3DestinationNotEmptyError , S3KeyNotFoundError
2929from ._models import (
@@ -407,6 +407,7 @@ async def copy_object(
407407 src_object_key : S3ObjectKey ,
408408 dst_object_key : S3ObjectKey ,
409409 bytes_transfered_cb : CopiedBytesTransferredCallback | None ,
410+ object_metadata : S3MetaData | None = None ,
410411 ) -> None :
411412 """copy a file in S3 using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
412413 copy_options : dict [str , Any ] = {
@@ -415,7 +416,7 @@ async def copy_object(
415416 "Key" : dst_object_key ,
416417 "Config" : TransferConfig (
417418 max_concurrency = self .transfer_max_concurrency ,
418- multipart_threshold = PRESIGNED_LINK_MAX_SIZE ,
419+ multipart_threshold = MULTIPART_COPY_THRESHOLD ,
419420 ),
420421 }
421422 if bytes_transfered_cb :
@@ -424,7 +425,16 @@ async def copy_object(
424425 bytes_transfered_cb , file_name = f"{ dst_object_key } "
425426 )
426427 }
428+ # NOTE: boto3's copy() issues a single CopyObject call until 'multipart_threshold' is reached, then switches to a multipart copy.
429+ # CopyObject exposes no progress callbacks, so progress cannot be tracked for small objects; we therefore
430+ # report at least the completed object's size once the copy finishes.
427431 await self ._client .copy (** copy_options )
432+ if bytes_transfered_cb :
433+ if object_metadata is None :
434+ object_metadata = await self .get_object_metadata (
435+ bucket = bucket , object_key = dst_object_key
436+ )
437+ bytes_transfered_cb (object_metadata .size , file_name = f"{ dst_object_key } " )
428438
429439 @s3_exception_handler (_logger )
430440 async def copy_objects_recursively (
@@ -448,6 +458,7 @@ async def copy_objects_recursively(
448458 src_object_key = s3_object .object_key ,
449459 dst_object_key = s3_object .object_key .replace (src_prefix , dst_prefix ),
450460 bytes_transfered_cb = bytes_transfered_cb ,
461+ object_metadata = s3_object ,
451462 )
452463 async for s3_object in self ._list_all_objects (
453464 bucket = bucket , prefix = src_prefix
0 commit comments