
Commit 8455f6b

Merge pull request #219 from gqmelo/gqmelo/fix-s3-multipart-log
s3: fix multipart upload debug log
2 parents: 33b04b7 + e4bac81

2 files changed: +50, -50 lines

rohmu/object_storage/s3.py
Lines changed: 1 addition & 1 deletion

@@ -542,7 +542,7 @@ def multipart_upload_file_object(
         chunks, chunk_size = self.calculate_chunks_and_chunk_size(size)
         args, sanitized_metadata, path = self._init_args_for_multipart(key, metadata, mimetype, cache_control)
         self.log.debug(
-            "Starting to upload multipart file: %r, size: %s, chunks: %d (chunk size: %d)",
+            "Starting to upload multipart file: %r, size: %s, chunks: %s (chunk size: %d)",
             path,
             size,
             chunks if size is not None else "<?>",
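
Why the one-character change matters: when the upload size is unknown, the chunk count is logged as the placeholder string "<?>", and %-style formatting (which logging applies lazily when the record is rendered) rejects a string for %d but accepts any value for %s. A small illustration of the difference, not part of the commit:

# Illustration only: %d requires a number, %s accepts anything.
chunks = "<?>"  # placeholder logged when the upload size is unknown

print("chunks: %s (chunk size: %d)" % (chunks, 100000))  # -> chunks: <?> (chunk size: 100000)

try:
    print("chunks: %d (chunk size: %d)" % (chunks, 100000))
except TypeError as exc:
    # the old format string raised the same error when logging rendered the record
    print("old format string fails:", exc)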

test/object_storage/test_s3.py
Lines changed: 49 additions & 49 deletions

@@ -13,14 +13,15 @@
 from rohmu.object_storage.config import S3_MAX_NUM_PARTS_PER_UPLOAD, S3ObjectStorageConfig
 from rohmu.object_storage.s3 import S3Transfer
 from tempfile import NamedTemporaryFile
-from typing import Any, BinaryIO, Callable, cast, Iterator, Optional, Union
+from typing import Any, BinaryIO, Callable, Iterator, Optional, Union
 from unittest.mock import ANY, call, MagicMock, patch

 import botocore.exceptions
 import contextlib
 import logging
 import math
 import pytest
+import re
 import rohmu.object_storage.s3

 log = logging.getLogger(__name__)
@@ -672,7 +673,9 @@ def randbytes(size: int) -> bytes:

 # All other tests aggressively mock this function, so let's test it now once and for all.
 @pytest.mark.parametrize("_size", [999999, None])
-def test_multipart_upload_file_object(mocker: Any, _size: Optional[int]) -> None:
+def test_multipart_upload_file_object(
+    mocker: Any, tmp_path: Path, caplog: pytest.LogCaptureFixture, _size: Optional[int]
+) -> None:
     cache_control = "no-cache"
     key = "test_key_123"
     metadata = {"name1": "test_meta_data", "key_b": "rfgdswe"}
@@ -681,7 +684,8 @@ def test_multipart_upload_file_object(mocker: Any, _size: Optional[int]) -> None
     size = _size if _size is not None else 999999
     test_data = randbytes(size)
     segment_size = round((size + 1) / 10)
-    expected_chunks = round(size / segment_size) if _size is not None else "<?>"
+    expected_chunks = str(round(size / segment_size)) if _size is not None else "<?>"
+    expected_chunks = re.escape(expected_chunks)

     transfer = make_mock_transfer(
         mocker,
@@ -692,48 +696,46 @@ def test_multipart_upload_file_object(mocker: Any, _size: Optional[int]) -> None
             "segment_size": segment_size,
         },
     )
-    transfer.log = MagicMock()
-    with NamedTemporaryFile() as tmpfile:
-        tmpfile.write(test_data)
-        tmpfile.flush()
-        tmpfile.seek(0)
-
-        transfer.multipart_upload_file_object(
-            cache_control=cache_control,
-            fp=cast(BinaryIO, tmpfile),
-            key=key,
-            metadata=metadata,
-            mimetype=mimetype,
-            progress_fn=progress_fn,
-            size=_size,
-        )
-
-        log.info(f"size = {_size}, {size}")
-        log.info(transfer.log.mock_calls)
-        expected = [
-            call.debug(
-                "Starting to upload multipart file: %r, size: %s, chunks: %d (chunk size: %d)",
-                "test-prefix/test_key_123",
-                _size,
-                expected_chunks,
-                segment_size,
-            ),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 1, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 2, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 3, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 4, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 5, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 6, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 7, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 8, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 9, expected_chunks, segment_size, ANY),
-            call.info("Uploaded part %s of %s, size %s in %.2fs", 10, expected_chunks, ANY, ANY),
-            call.info("Multipart upload of %r complete, size: %r, took: %.2fs", "test-prefix/test_key_123", _size, ANY),
-        ]
-        transfer.log.assert_has_calls(expected)
+    tmpfile = tmp_path / "file_to_upload.bin"
+    tmpfile.write_bytes(test_data)

+    log.info(f"size = {_size}, {size}")

-def test_multipart_upload_source_file_empty(mocker: Any) -> None:
+    with open(tmpfile, "rb") as fp:
+        with caplog.at_level(logging.DEBUG):
+            transfer.multipart_upload_file_object(
+                cache_control=cache_control,
+                fp=fp,
+                key=key,
+                metadata=metadata,
+                mimetype=mimetype,
+                progress_fn=progress_fn,
+                size=_size,
+            )
+    log_lines = [rec.message for rec in caplog.records]
+    line_matcher = pytest.LineMatcher(log_lines)
+    expected = [
+        (
+            rf"Starting to upload multipart file: 'test-prefix/test_key_123', size: {_size},"
+            rf" chunks: {expected_chunks} \(chunk size: 100000\)"
+        ),
+        rf"Uploaded part 1 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 2 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 3 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 4 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 5 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 6 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 7 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 8 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 9 of {expected_chunks}, size 100000 in [\d]+\.[\d]+s",
+        rf"Uploaded part 10 of {expected_chunks}, size 99999 in [\d]+\.[\d]+s",
+        rf"Multipart upload of 'test-prefix/test_key_123' complete, size: {_size}, took: [\d]+\.[\d]+s",
+    ]
+
+    line_matcher.re_match_lines(expected)
+
+
+def test_multipart_upload_source_file_empty(mocker: Any, tmp_path: Path) -> None:
     cache_control = "no-cache"
     key = "test_key_123"
     metadata = {"name1": "test_meta_data", "key_b": "rfgdswe"}
@@ -752,15 +754,13 @@ def test_multipart_upload_source_file_empty(mocker: Any) -> None:
         },
     )
     transfer.log = MagicMock()
-    with NamedTemporaryFile() as tmpfile:
-        tmpfile.write(b"")
-        tmpfile.flush()
-        tmpfile.seek(0)
-
+    tmpfile = tmp_path / "file_to_upload.bin"
+    tmpfile.write_bytes(b"")
+    with open(tmpfile, "rb") as fp:
         with pytest.raises(EOFError):
             transfer.multipart_upload_file_object(
                 cache_control=cache_control,
-                fp=cast(BinaryIO, tmpfile),
+                fp=fp,
                 key=key,
                 metadata=metadata,
                 mimetype=mimetype,
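
The rewritten test drops the MagicMock logger and instead captures real log output with pytest's caplog fixture, matching it against LineMatcher regexes (hence the new re.escape on the expected chunk count). A minimal, self-contained sketch of that assertion style, with illustrative names that are not from the repository:

import logging

import pytest

log = logging.getLogger(__name__)


def upload_parts(n: int) -> None:
    # stand-in for the code under test; emits one log line per part
    for part in range(1, n + 1):
        log.info("Uploaded part %s of %s", part, n)


def test_upload_parts_logging(caplog: pytest.LogCaptureFixture) -> None:
    with caplog.at_level(logging.INFO):
        upload_parts(2)
    # caplog.records holds the captured LogRecords; re_match_lines checks that
    # the given regexes match the captured lines in order
    matcher = pytest.LineMatcher([rec.message for rec in caplog.records])
    matcher.re_match_lines(
        [
            r"Uploaded part 1 of 2",
            r"Uploaded part 2 of 2",
        ]
    )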

0 commit comments
