33import functools
44import logging
55import urllib .parse
6- import warnings
76from collections .abc import AsyncGenerator , Sequence
87from dataclasses import dataclass , field
98from pathlib import Path
1211import aioboto3
1312from aiobotocore .session import ClientCreatorContext
1413from boto3 .s3 .transfer import TransferConfig
15- from botocore import __version__ as botocore_version
1614from botocore import exceptions as botocore_exc
1715from botocore .client import Config
1816from models_library .api_schemas_storage .storage_schemas import (
2220)
2321from models_library .basic_types import SHA256Str
2422from models_library .bytes_iters import BytesIter , DataSize
25- from packaging import version
2623from pydantic import AnyUrl , ByteSize , TypeAdapter
2724from servicelib .bytes_iters import DEFAULT_READ_CHUNK_SIZE , BytesStreamer
2825from servicelib .logging_utils import log_catch , log_context
5451)
5552from ._utils import compute_num_file_chunks , create_final_prefix
5653
# Parsed version of the installed botocore package, compared below against the
# first release known to break uploads to CEPH-backed S3 deployments.
_BOTOCORE_VERSION: Final[version.Version] = version.parse(botocore_version)
# botocore >= 1.36.0 changed default checksum behavior in a way CEPH S3 does not
# accept (until CEPH is updated) — see the warning text below.
_MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3: Final[version.Version] = version.parse(
    "1.36.0"
)


def _check_botocore_version() -> None:
    """Warn if the installed botocore is too new to work with CEPH S3 uploads.

    Emits a ``RuntimeWarning`` (it does not raise) when the installed botocore
    version is >= 1.36.0; ``stacklevel=2`` attributes the warning to the caller
    of this helper rather than to this line.
    """
    if _BOTOCORE_VERSION >= _MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3:
        warnings.warn(
            f"Botocore version { botocore_version } is not supported for file uploads with CEPH S3 until CEPH is updated. "
            "Please use a version < 1.36.0. The upload operation will likely fail.",
            RuntimeWarning,
            stacklevel=2,
        )
71-
72-
# Module-level logger, following the convention of one logger per module.
_logger = logging.getLogger(__name__)

# Default maximum number of concurrent S3 transfer threads/tasks
# (presumably fed into boto3's TransferConfig — TODO confirm at the usage site).
_S3_MAX_CONCURRENCY_DEFAULT: Final[int] = 10
@@ -107,13 +88,21 @@ async def create(
10788 session_client = None
10889 exit_stack = contextlib .AsyncExitStack ()
10990 try :
91+ config = Config (
92+ # This setting tells the S3 client to only calculate checksums when explicitly required
93+ # by the operation. This avoids unnecessary checksum calculations for operations that
94+ # don't need them, improving performance.
95+ # See: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3.html#calculating-checksums
96+ signature_version = "s3v4" ,
97+ request_checksum_calculation = "when_required" , # type: ignore[call-arg]
98+ )
11099 session_client = session .client ( # type: ignore[call-overload]
111100 "s3" ,
112101 endpoint_url = f"{ settings .S3_ENDPOINT } " ,
113102 aws_access_key_id = settings .S3_ACCESS_KEY ,
114103 aws_secret_access_key = settings .S3_SECRET_KEY ,
115104 region_name = settings .S3_REGION ,
116- config = Config ( signature_version = "s3v4" ) ,
105+ config = config ,
117106 )
118107 assert isinstance (session_client , ClientCreatorContext ) # nosec
119108
@@ -523,9 +512,6 @@ async def upload_file(
523512 bytes_transfered_cb : UploadedBytesTransferredCallback | None ,
524513 ) -> None :
525514 """upload a file using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
526-
527- _check_botocore_version ()
528-
529515 upload_options : dict [str , Any ] = {
530516 "Bucket" : bucket ,
531517 "Key" : object_key ,
@@ -550,9 +536,6 @@ async def copy_object(
550536 object_metadata : S3MetaData | None = None ,
551537 ) -> None :
552538 """copy a file in S3 using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
553-
554- _check_botocore_version ()
555-
556539 copy_options : dict [str , Any ] = {
557540 "CopySource" : {"Bucket" : bucket , "Key" : src_object_key },
558541 "Bucket" : bucket ,
@@ -659,7 +642,6 @@ async def upload_object_from_file_like(
659642 file_like_reader : FileLikeReader ,
660643 ) -> None :
661644 """streams write an object in S3 from an AsyncIterable[bytes]"""
662- _check_botocore_version ()
663645 await self ._client .upload_fileobj (file_like_reader , bucket_name , object_key ) # type: ignore[arg-type]
664646
665647 @staticmethod
0 commit comments