33import functools
44import logging
55import urllib .parse
6+ import warnings
67from collections .abc import AsyncGenerator , Sequence
78from dataclasses import dataclass , field
89from pathlib import Path
1112import aioboto3
1213from aiobotocore .session import ClientCreatorContext
1314from boto3 .s3 .transfer import TransferConfig
15+ from botocore import __version__ as botocore_version
1416from botocore import exceptions as botocore_exc
1517from botocore .client import Config
1618from models_library .api_schemas_storage .storage_schemas import (
2022)
2123from models_library .basic_types import SHA256Str
2224from models_library .bytes_iters import BytesIter , DataSize
25+ from packaging import version
2326from pydantic import AnyUrl , ByteSize , TypeAdapter
2427from servicelib .bytes_iters import DEFAULT_READ_CHUNK_SIZE , BytesStreamer
2528from servicelib .logging_utils import log_catch , log_context
5154)
5255from ._utils import compute_num_file_chunks , create_final_prefix
5356
# Installed botocore version, parsed once at import time.
_BOTOCORE_VERSION: Final[version.Version] = version.parse(botocore_version)
# NOTE(review): despite the "MAX ... COMPATIBLE" name, the comparison below is
# `>=`, so this version itself is already treated as incompatible — it is the
# first *unsupported* version. Kept as-is to avoid renaming a module-level name.
_MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3: Final[version.Version] = version.parse(
    "1.36.0"
)


def _check_botocore_version() -> None:
    """Warn when the installed botocore is considered incompatible with CEPH S3.

    Emits a ``RuntimeWarning`` (attributed to the caller via ``stacklevel=2``)
    when the installed botocore version is >= 1.36.0, since uploads against
    CEPH S3 are expected to fail with such versions until CEPH is updated.
    Returns nothing and never raises.
    """
    if _BOTOCORE_VERSION >= _MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3:
        warnings.warn(
            f"Botocore version {botocore_version} is not supported for file uploads with CEPH S3 until CEPH is updated. "
            "Please use a version < 1.36.0. The upload operation will likely fail.",
            RuntimeWarning,
            stacklevel=2,
        )
71+
72+
# Module-level logger, named after this module per stdlib convention.
_logger = logging.getLogger(__name__)

# Default number of concurrent transfer threads used by the aioboto3/boto3
# transfer manager when the caller does not override it.
_S3_MAX_CONCURRENCY_DEFAULT: Final[int] = 10
@@ -504,6 +523,9 @@ async def upload_file(
504523 bytes_transfered_cb : UploadedBytesTransferredCallback | None ,
505524 ) -> None :
506525 """upload a file using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
526+
527+ _check_botocore_version ()
528+
507529 upload_options : dict [str , Any ] = {
508530 "Bucket" : bucket ,
509531 "Key" : object_key ,
@@ -528,6 +550,9 @@ async def copy_object(
528550 object_metadata : S3MetaData | None = None ,
529551 ) -> None :
530552 """copy a file in S3 using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
553+
554+ _check_botocore_version ()
555+
531556 copy_options : dict [str , Any ] = {
532557 "CopySource" : {"Bucket" : bucket , "Key" : src_object_key },
533558 "Bucket" : bucket ,
@@ -634,6 +659,7 @@ async def upload_object_from_file_like(
634659 file_like_reader : FileLikeReader ,
635660 ) -> None :
636661 """streams write an object in S3 from an AsyncIterable[bytes]"""
662+ _check_botocore_version ()
637663 await self ._client .upload_fileobj (file_like_reader , bucket_name , object_key ) # type: ignore[arg-type]
638664
639665 @staticmethod
0 commit comments