Skip to content

Commit 0e34581

Browse files
authored
Merge pull request #136 from awslabs/head_object_with_retry
Making head_object_with_retry public again (rename from _head_object_with_retry; adds test_head_object_with_retry)
2 parents 5180122 + 19940c1 commit 0e34581

File tree

6 files changed

+27
-6
lines changed

6 files changed

+27
-6
lines changed

awswrangler/pandas.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def _read_csv_iterator(self, bucket_name, key_path, max_result_size=200_000_000,
111111
if pd_additional_kwargs['compression'] is not None:
112112
raise InvalidParameters("max_result_size currently does not support compressed files")
113113

114-
metadata = S3._head_object_with_retry(client_s3=self._client_s3, bucket=bucket_name, key=key_path)
114+
metadata = S3.head_object_with_retry(client_s3=self._client_s3, bucket=bucket_name, key=key_path)
115115
total_size = metadata["ContentLength"]
116116
logger.debug(f"total_size: {total_size}")
117117
if total_size <= 0:

awswrangler/s3.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -323,7 +323,17 @@ def list_objects(self, path: str) -> List[str]:
323323
stop=tenacity.stop_after_attempt(max_attempt_number=10),
324324
reraise=True,
325325
after=tenacity.after_log(logger, INFO))
326-
def _head_object_with_retry(client_s3: client, bucket: str, key: str) -> Dict[str, Any]:
326+
def head_object_with_retry(client_s3: client, bucket: str, key: str) -> Dict[str, Any]:
327+
"""
328+
Executes the Boto3 head_object() function with an extra layer of random exponential back-off.
329+
330+
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
331+
332+
:param client_s3: Boto3 S3 client
333+
:param bucket: Bucket name.
334+
:param key: Key
335+
:return: Boto3 head_object() regular response dictionary
336+
"""
327337
return client_s3.head_object(Bucket=bucket, Key=key)
328338

329339
@staticmethod
@@ -336,7 +346,7 @@ def _get_objects_head_remote(send_pipe, session_primitives: "SessionPrimitives",
336346
logger.debug(f"len(objects_paths): {len(objects_paths)}")
337347
for object_path in objects_paths:
338348
bucket, key = object_path.replace("s3://", "").split("/", 1)
339-
res = S3._head_object_with_retry(client_s3=client_s3, bucket=bucket, key=key)
349+
res = S3.head_object_with_retry(client_s3=client_s3, bucket=bucket, key=key)
340350
size = res["ContentLength"]
341351
objects_sizes[object_path] = size
342352
logger.debug(f"len(objects_sizes): {len(objects_sizes)}")

testing/build-image.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env bash
2-
set -e
2+
set -ex
33

44
cp ../requirements.txt .
55
cp ../requirements-dev.txt .

testing/deploy-cloudformation.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env bash
2-
set -e
2+
set -ex
33

44
aws cloudformation deploy \
55
--template-file template.yaml \

testing/start-jupyter.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env bash
2-
set -e
2+
set -ex
33

44
cd ..
55
pip install -e .

testing/test_awswrangler/test_s3.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -231,3 +231,14 @@ def test_copy_listed_objects(session, bucket, database, mode, procs_io_bound):
231231
def test_wait_object_exists(bucket):
232232
with pytest.raises(S3WaitObjectTimeout):
233233
wr.s3.wait_object_exists(path=f"s3://{bucket}/test_wait_object_exists.txt", timeout=5.0)
234+
235+
236+
def test_head_object_with_retry(bucket):
237+
key = "test_head_object_with_retry"
238+
boto3.resource("s3").Object(bucket, key).put(Body=str("Hello!"))
239+
res = wr.s3.head_object_with_retry(
240+
client_s3=boto3.client("s3"),
241+
bucket=bucket,
242+
key=key
243+
)
244+
assert res["ResponseMetadata"]["HTTPHeaders"]["content-length"] == "6"

0 commit comments

Comments (0)