Skip to content

Commit a190283

Browse files
committed
Test: Add tests for S3Artifact class
1 parent 44b2c48 commit a190283

File tree

1 file changed

+221
-0
lines changed

1 file changed

+221
-0
lines changed

tests/s3/test_s3_artifacts.py

Lines changed: 221 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,221 @@
1+
import pytest
2+
from unittest.mock import MagicMock, patch
3+
from pathlib import Path
4+
from tempfile import TemporaryDirectory
5+
from hashlib import md5, sha256
6+
from gardenlinux.s3.s3_artifacts import S3Artifacts
7+
8+
9+
# Minimal stand-in for the gardenlinux CName class: fixed attribute values,
# the supplied cname string is accepted but ignored.
class DummyCName:
    def __init__(self, cname):
        # Values chosen to match the fake .release files used in these tests.
        for attr, value in (
            ("platform", "aws"),
            ("arch", "amd64"),
            ("version", "1234.1"),
            ("commit_id", "abc123"),
        ):
            setattr(self, attr, value)
16+
17+
18+
# Helpers to compute digests for fake files
def dummy_digest(data, algo: str):
    """
    Dummy for file_digest() to compute hashes for in-memory byte streams.

    Note: despite the original annotation, *data* is a binary file-like
    object (anything with ``read()``/``seek()``, e.g. ``io.BytesIO``),
    mirroring the stream argument of ``hashlib.file_digest``.

    :param data: binary stream to hash; its cursor is reset afterwards
    :param algo: digest name, either ``"md5"`` or ``"sha256"``
    :return: a hashlib hash object over the stream's full contents
    :raises ValueError: if *algo* is not one of the supported names
    """
    content = data.read()
    data.seek(0)  # Reset byte cursor to start for multiple uses

    if algo == "md5":
        return md5(content)
    if algo == "sha256":
        return sha256(content)
    raise ValueError(f"Unsupported algo: {algo}")
32+
33+
34+
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_s3artifacts_init_success(mock_bucket_class):
    """
    Sanity test: S3Artifacts wires its Bucket correctly on construction.
    """
    bucket = MagicMock()
    mock_bucket_class.return_value = bucket

    artifacts = S3Artifacts("my-bucket")

    # Bucket must be created once with the name and default (None) extras.
    mock_bucket_class.assert_called_once_with("my-bucket", None, None)
    assert artifacts._bucket == bucket
46+
47+
48+
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_s3_artifacts_invalid_bucket(mock_bucket):
    """
    Sanity test: constructing S3Artifacts propagates Bucket failures.
    """
    # Bucket construction blows up for a non-existing bucket.
    mock_bucket.side_effect = RuntimeError("Bucket does not exist")

    with pytest.raises(RuntimeError, match="Bucket does not exist"):
        S3Artifacts("invalid-bucket")
58+
59+
60+
@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_download_to_directory_success(mock_bucket_class):
    """
    Test download of multiple files to a directory on disk.
    """
    bucket = MagicMock()

    # Metadata (release) object stored under meta/singles/<cname>
    release_object = MagicMock()
    release_object.key = "meta/singles/testcname"

    # Artifact objects stored under objects/<cname>/
    artifact_one = MagicMock()
    artifact_one.key = "objects/testcname/file1"
    artifact_two = MagicMock()
    artifact_two.key = "objects/testcname/file2"

    # Mimics the boto3 collection returned by .objects.filter(Prefix=...)
    class MockFilterReturn:
        def all(self):
            return [artifact_one, artifact_two]

    # Simulate different responses depending on the requested prefix:
    # metadata lookups yield a plain list, artifact lookups yield the
    # collection-like object above, anything else finds nothing.
    def filter_side_effect(Prefix):
        if Prefix == "meta/singles/testcname":
            return [release_object]
        if Prefix == "objects/testcname":
            return MockFilterReturn()
        return []

    bucket.objects.filter.side_effect = filter_side_effect
    mock_bucket_class.return_value = bucket

    with TemporaryDirectory() as tmpdir:
        artifacts_dir = Path(tmpdir)

        artifacts = S3Artifacts("test-bucket")
        artifacts.download_to_directory("testcname", artifacts_dir)

        # Metadata file lands in the directory under a fixed yaml name.
        bucket.download_file.assert_any_call(
            "meta/singles/testcname",
            artifacts_dir / "testcname.s3_metadata.yaml",
        )

        # Each artifact is downloaded to its basename inside the directory.
        bucket.download_file.assert_any_call(
            "objects/testcname/file1", artifacts_dir / "file1"
        )
        bucket.download_file.assert_any_call(
            "objects/testcname/file2", artifacts_dir / "file2"
        )

        assert bucket.download_file.call_count == 3
122+
123+
124+
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_download_to_directory_invalid_path(mock_bucket):
    """
    Sanity test: downloading into a non-existing directory raises.
    """
    artifacts = S3Artifacts("bucket")

    with pytest.raises(RuntimeError):
        artifacts.download_to_directory("test-cname", "/invalid/path/does/not/exist")
132+
133+
134+
@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest)
@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_upload_from_directory_success(mock_bucket_class, mock_digest):
    """
    Test upload of multiple artifacts from disk to bucket.
    """
    bucket = MagicMock()
    bucket.name = "test-bucket"
    mock_bucket_class.return_value = bucket

    # Minimal fake .release file content consumed by upload_from_directory
    release_data = """
GARDENLINUX_VERSION = 1234.1
GARDENLINUX_COMMIT_ID = abc123
GARDENLINUX_COMMIT_ID_LONG = abc123long
GARDENLINUX_FEATURES = _usi,_trustedboot
"""

    # Lay out a release file plus two artifact files on disk
    with TemporaryDirectory() as tmpdir:
        artifacts_dir = Path(tmpdir)
        cname = "testcname"

        # Write .release file
        (artifacts_dir / f"{cname}.release").write_text(release_data)

        # Dummy artifact payloads to be uploaded
        for name in [f"{cname}-file1", f"{cname}-file2"]:
            (artifacts_dir / name).write_bytes(b"dummy content")

        artifacts = S3Artifacts("test-bucket")
        artifacts.upload_from_directory(cname, artifacts_dir)

        calls = bucket.upload_file.call_args_list

        # Every artifact must be uploaded with ExtraArgs carrying "Tagging"
        for name in [f"{cname}-file1", f"{cname}-file2"]:
            key = f"objects/{cname}/{name}"
            path = artifacts_dir / name

            # Is there a call with matching (path, key) and digest tagging?
            tagged = any(
                call.args[0] == path
                and call.args[1] == key
                and isinstance(call.kwargs.get("ExtraArgs"), dict)
                and "Tagging" in call.kwargs["ExtraArgs"]
                for call in calls
            )
            assert tagged, f"upload_file was not called with Tagging for {name}"
190+
191+
192+
@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest)
@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName)
@patch("gardenlinux.s3.s3_artifacts.Bucket")
def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_path):
    """
    Test that upload_from_directory deletes existing files before uploading
    when delete_before_push=True
    """
    bucket = MagicMock()
    bucket.name = "test-bucket"
    mock_bucket_class.return_value = bucket

    artifacts = S3Artifacts("test-bucket")
    cname = "test-cname"

    # Fake .release metadata alongside the artifact
    release_lines = [
        "GARDENLINUX_VERSION = 1234.1",
        "GARDENLINUX_COMMIT_ID = abc123",
        "GARDENLINUX_COMMIT_ID_LONG = abc123long",
        "GARDENLINUX_FEATURES = _usi,_trustedboot",
    ]
    (tmp_path / f"{cname}.release").write_text("\n".join(release_lines) + "\n")

    artifact = tmp_path / f"{cname}.kernel"
    artifact.write_bytes(b"fake")

    artifacts.upload_from_directory(cname, tmp_path, delete_before_push=True)

    # Both the artifact object and the metadata object must be deleted first
    bucket.delete_objects.assert_any_call(
        Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]}
    )
    bucket.delete_objects.assert_any_call(
        Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]}
    )

0 commit comments

Comments
 (0)