@@ -3,7 +3,6 @@
 import functools
 import logging
 import urllib.parse
-import warnings
 from collections.abc import AsyncGenerator, Sequence
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -12,7 +11,6 @@
 import aioboto3
 from aiobotocore.session import ClientCreatorContext
 from boto3.s3.transfer import TransferConfig
-from botocore import __version__ as botocore_version
 from botocore import exceptions as botocore_exc
 from botocore.client import Config
 from models_library.api_schemas_storage.storage_schemas import (
@@ -22,7 +20,6 @@
 )
 from models_library.basic_types import SHA256Str
 from models_library.bytes_iters import BytesIter, DataSize
-from packaging import version
 from pydantic import AnyUrl, ByteSize, TypeAdapter
 from servicelib.bytes_iters import DEFAULT_READ_CHUNK_SIZE, BytesStreamer
 from servicelib.logging_utils import log_catch, log_context
@@ -54,22 +51,6 @@
 )
 from ._utils import compute_num_file_chunks, create_final_prefix

-_BOTOCORE_VERSION: Final[version.Version] = version.parse(botocore_version)
-_MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3: Final[version.Version] = version.parse(
-    "1.36.0"
-)
-
-
-def _check_botocore_version() -> None:
-    if _BOTOCORE_VERSION >= _MAX_BOTOCORE_VERSION_COMPATIBLE_WITH_CEPH_S3:
-        warnings.warn(
-            f"Botocore version {botocore_version} is not supported for file uploads with CEPH S3 until CEPH is updated. "
-            "Please use a version < 1.36.0. The upload operation will likely fail.",
-            RuntimeWarning,
-            stacklevel=2,
-        )
-
-
 _logger = logging.getLogger(__name__)

 _S3_MAX_CONCURRENCY_DEFAULT: Final[int] = 10
@@ -526,9 +507,6 @@ async def upload_file(
         bytes_transfered_cb: UploadedBytesTransferredCallback | None,
     ) -> None:
         """upload a file using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
-
-        _check_botocore_version()
-
         upload_options: dict[str, Any] = {
             "Bucket": bucket,
             "Key": object_key,
@@ -553,9 +531,6 @@ async def copy_object(
         object_metadata: S3MetaData | None = None,
     ) -> None:
         """copy a file in S3 using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)"""
-
-        _check_botocore_version()
-
         copy_options: dict[str, Any] = {
             "CopySource": {"Bucket": bucket, "Key": src_object_key},
             "Bucket": bucket,
@@ -662,7 +637,6 @@ async def upload_object_from_file_like(
         file_like_reader: FileLikeReader,
     ) -> None:
         """streams write an object in S3 from an AsyncIterable[bytes]"""
-        _check_botocore_version()
         await self._client.upload_fileobj(file_like_reader, bucket_name, object_key)  # type: ignore[arg-type]

     @staticmethod