
Commit 1541e9f

GSVarsha authored and pvital committed
tests(s3): Add new testcase to verify boto3.resource().Bucket().upload_fileobj()
Signed-off-by: Varsha GS <[email protected]>
1 parent ee42830 commit 1541e9f
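For reference, the resource-level path exercised by the new test looks roughly like the sketch below. This is a minimal illustration only, assuming a locally mocked S3 via moto's mock_aws; the bucket and key names are illustrative and not taken from the diff.

    import boto3
    from io import BytesIO
    from moto import mock_aws

    # Minimal sketch: upload via boto3.resource().Bucket().upload_fileobj()
    # against a moto-mocked S3. Bucket/key names are illustrative only.
    with mock_aws():
        s3 = boto3.resource("s3", region_name="us-east-1")
        s3.create_bucket(Bucket="demo-bucket")

        bucket = s3.Bucket("demo-bucket")
        bucket.upload_fileobj(BytesIO(b"somedata"), "demo-key")

        # Read the object back to confirm the upload round-trips.
        body = bucket.Object("demo-key").get()["Body"].read()
        assert body == b"somedata"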

File tree

1 file changed: +81 -46 lines changed


tests/clients/boto3/test_boto3_s3.py

Lines changed: 81 additions & 46 deletions
@@ -2,10 +2,12 @@
 # (c) Copyright Instana Inc. 2020
 
 import os
+from io import BytesIO
+
 import pytest
+import boto3
 from typing import Generator
 from moto import mock_aws
-import boto3
 
 from instana.singletons import tracer, agent
 from tests.helpers import get_first_span_by_filter
@@ -18,13 +20,18 @@
 
 
 class TestS3:
+    @classmethod
+    def setup_class(cls) -> None:
+        cls.bucket_name = "aws_bucket_name"
+        cls.object_name = "aws_key_name"
+        cls.recorder = tracer.span_processor
+        cls.mock = mock_aws()
+
     @pytest.fixture(autouse=True)
     def _resource(self) -> Generator[None, None, None]:
         """Setup and Teardown"""
         # Clear all spans before a test run
-        self.recorder = tracer.span_processor
         self.recorder.clear_spans()
-        self.mock = mock_aws()
         self.mock.start()
         self.s3 = boto3.client("s3", region_name="us-east-1")
         yield
@@ -33,19 +40,19 @@ def _resource(self) -> Generator[None, None, None]:
         agent.options.allow_exit_as_root = False
 
     def test_vanilla_create_bucket(self) -> None:
-        self.s3.create_bucket(Bucket="aws_bucket_name")
+        self.s3.create_bucket(Bucket=self.bucket_name)
 
         result = self.s3.list_buckets()
         assert len(result["Buckets"]) == 1
-        assert result["Buckets"][0]["Name"] == "aws_bucket_name"
+        assert result["Buckets"][0]["Name"] == self.bucket_name
 
     def test_s3_create_bucket(self) -> None:
         with tracer.start_as_current_span("test"):
-            self.s3.create_bucket(Bucket="aws_bucket_name")
+            self.s3.create_bucket(Bucket=self.bucket_name)
 
         result = self.s3.list_buckets()
         assert len(result["Buckets"]) == 1
-        assert result["Buckets"][0]["Name"] == "aws_bucket_name"
+        assert result["Buckets"][0]["Name"] == self.bucket_name
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -65,11 +72,11 @@ def test_s3_create_bucket(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "CreateBucket"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_create_bucket_as_root_exit_span(self) -> None:
         agent.options.allow_exit_as_root = True
-        self.s3.create_bucket(Bucket="aws_bucket_name")
+        self.s3.create_bucket(Bucket=self.bucket_name)
 
         agent.options.allow_exit_as_root = False
         self.s3.list_buckets()
@@ -83,7 +90,7 @@ def test_s3_create_bucket_as_root_exit_span(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "CreateBucket"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_list_buckets(self) -> None:
         with tracer.start_as_current_span("test"):
@@ -113,21 +120,15 @@ def test_s3_list_buckets(self) -> None:
         assert not s3_span.data["s3"]["bucket"]
 
     def test_s3_vanilla_upload_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        result = self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        result = self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)
         assert not result
 
     def test_s3_upload_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
 
         with tracer.start_as_current_span("test"):
-            self.s3.upload_file(upload_filename, bucket_name, object_name)
+            self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -147,17 +148,14 @@ def test_s3_upload_file(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "UploadFile"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_upload_file_obj(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
 
         with tracer.start_as_current_span("test"):
             with open(upload_filename, "rb") as fd:
-                self.s3.upload_fileobj(fd, bucket_name, object_name)
+                self.s3.upload_fileobj(fd, self.bucket_name, self.object_name)
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -177,17 +175,14 @@ def test_s3_upload_file_obj(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "UploadFileObj"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_download_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)
 
         with tracer.start_as_current_span("test"):
-            self.s3.download_file(bucket_name, object_name, download_target_filename)
+            self.s3.download_file(self.bucket_name, self.object_name, download_target_filename)
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -207,18 +202,15 @@ def test_s3_download_file(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "DownloadFile"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_download_file_obj(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)
 
         with tracer.start_as_current_span("test"):
             with open(download_target_filename, "wb") as fd:
-                self.s3.download_fileobj(bucket_name, object_name, fd)
+                self.s3.download_fileobj(self.bucket_name, self.object_name, fd)
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -238,15 +230,13 @@ def test_s3_download_file_obj(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "DownloadFileObj"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
 
     def test_s3_list_obj(self) -> None:
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
 
         with tracer.start_as_current_span("test"):
-            self.s3.list_objects(Bucket=bucket_name)
+            self.s3.list_objects(Bucket=self.bucket_name)
 
         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -266,4 +256,49 @@ def test_s3_list_obj(self) -> None:
         assert not s3_span.ec
 
         assert s3_span.data["s3"]["op"] == "ListObjects"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
+
+    def test_s3_resource_bucket_upload_fileobj(self) -> None:
+        """
+        Verify boto3.resource().Bucket().upload_fileobj() works correctly with BytesIO objects
+        """
+        test_data = b"somedata"
+
+        # Create a bucket using the client first
+        self.s3.create_bucket(Bucket=self.bucket_name)
+
+        s3_resource = boto3.resource(
+            "s3",
+            region_name="us-east-1"
+        )
+        bucket = s3_resource.Bucket(name=self.bucket_name)
+
+        with tracer.start_as_current_span("test"):
+            bucket.upload_fileobj(BytesIO(test_data), self.object_name)
+
+        # Verify the upload was successful by retrieving the object
+        response = bucket.Object(self.object_name).get()
+        file_content = response["Body"].read()
+
+        # Assert the content matches what we uploaded
+        assert file_content == test_data
+
+        # Verify the spans were created correctly
+        spans = self.recorder.queued_spans()
+        assert len(spans) >= 2
+
+        filter = lambda span: span.n == "sdk"  # noqa: E731
+        test_span = get_first_span_by_filter(spans, filter)
+        assert test_span
+
+        filter = lambda span: span.n == "s3" and span.data["s3"]["op"] == "UploadFileObj"  # noqa: E731
+        s3_span = get_first_span_by_filter(spans, filter)
+        assert s3_span
+
+        assert s3_span.t == test_span.t
+        assert s3_span.p == test_span.s
+
+        assert not test_span.ec
+        assert not s3_span.ec
+
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

0 commit comments