|
1 | 1 | # -*- coding: utf-8 -*- |
2 | 2 |
|
| 3 | +import yaml |
3 | 4 | from pathlib import Path |
4 | 5 | from tempfile import TemporaryDirectory |
5 | 6 | import pytest |
@@ -69,6 +70,26 @@ def test_download_to_directory_invalid_path(s3_setup): |
69 | 70 | artifacts.download_to_directory({env.cname}, "/invalid/path/does/not/exist") |
70 | 71 |
|
71 | 72 |
|
def test_download_to_directory_non_pathlike_raises(s3_setup):
    """A destination that is not a directory makes download raise RuntimeError."""
    artifacts = S3Artifacts(s3_setup.bucket_name)

    with pytest.raises(RuntimeError):
        artifacts.download_to_directory(s3_setup.cname, "nopath")
| 79 | + |
| 80 | + |
def test_download_to_directory_no_metadata_raises(s3_setup):
    """Should raise IndexError if bucket has no matching metadata object."""
    env = s3_setup
    downloader = S3Artifacts(env.bucket_name)

    # Fresh bucket, fresh temp dir: no metadata exists for the cname.
    with TemporaryDirectory() as target_dir, pytest.raises(IndexError):
        downloader.download_to_directory(env.cname, target_dir)
| 91 | + |
| 92 | + |
72 | 93 | def test_upload_from_directory_success(s3_setup): |
73 | 94 | """ |
74 | 95 | Test upload of multiple artifacts from disk to bucket |
@@ -125,3 +146,89 @@ def test_upload_from_directory_with_delete(s3_setup): |
125 | 146 | # but the new upload file key should exist (artifact uploaded) |
126 | 147 | assert f"objects/{env.cname}/{artifact.name}" in keys |
127 | 148 | assert f"meta/singles/{env.cname}" in keys |
| 149 | + |
| 150 | + |
def test_upload_from_directory_arch_none_raises(monkeypatch, s3_setup):
    """Upload fails with RuntimeError when the CName carries no architecture."""
    env = s3_setup
    (env.tmp_path / f"{env.cname}.release").write_text(RELEASE_DATA)

    # Replace CName inside the module under test with a stub whose arch is None.
    import gardenlinux.s3.s3_artifacts as s3art

    class ArchlessCName:
        arch = None

        def __init__(self, _cname):
            pass

    monkeypatch.setattr(s3art, "CName", ArchlessCName)

    uploader = S3Artifacts(env.bucket_name)
    with pytest.raises(RuntimeError, match="Architecture could not be determined"):
        uploader.upload_from_directory(env.cname, env.tmp_path)
| 173 | + |
| 174 | + |
def test_upload_from_directory_invalid_dir_raises(s3_setup):
    """A non-existent artifacts directory makes upload raise RuntimeError."""
    uploader = S3Artifacts(s3_setup.bucket_name)

    with pytest.raises(RuntimeError, match="invalid"):
        uploader.upload_from_directory(s3_setup.cname, "/invalid/path")
| 181 | + |
| 182 | + |
def test_upload_from_directory_version_mismatch_raises(s3_setup):
    """RuntimeError if version in release file does not match cname."""
    # Arrange: write a release file whose version disagrees with the cname.
    env = s3_setup
    mismatched_release = RELEASE_DATA.replace("1234.1", "9999.9")
    (env.tmp_path / f"{env.cname}.release").write_text(mismatched_release)
    uploader = S3Artifacts(env.bucket_name)

    # Act / Assert
    with pytest.raises(RuntimeError, match="Version"):
        uploader.upload_from_directory(env.cname, env.tmp_path)
| 197 | + |
| 198 | + |
def test_upload_from_directory_commit_mismatch_raises(s3_setup):
    """Raise RuntimeError when commit ID is not matching with cname."""
    # Arrange: release data whose commit id no longer matches the cname.
    env = s3_setup
    tampered_release = RELEASE_DATA.replace("abc123", "wrong")
    (env.tmp_path / f"{env.cname}.release").write_text(tampered_release)
    uploader = S3Artifacts(env.bucket_name)

    # Act / Assert
    with pytest.raises(RuntimeError, match="Commit ID"):
        uploader.upload_from_directory(env.cname, env.tmp_path)
| 209 | + |
| 210 | + |
def test_upload_directory_with_requirements_override(s3_setup):
    """Ensure .requirements file values override feature flag defaults.

    Writes a release file, a ``.requirements`` file that flips ``uefi`` off
    and ``secureboot`` on, and one artifact, then verifies the metadata
    object uploaded to the bucket reflects the overridden flags.
    """
    # Arrange: release data, requirements overrides, one artifact on disk.
    env = s3_setup
    (env.tmp_path / f"{env.cname}.release").write_text(RELEASE_DATA)
    (env.tmp_path / f"{env.cname}.requirements").write_text(
        "uefi = false\nsecureboot = true\n"
    )
    artifact_file = env.tmp_path / f"{env.cname}-artifact"
    artifact_file.write_bytes(b"abc")

    # Act
    artifacts = S3Artifacts(env.bucket_name)
    artifacts.upload_from_directory(env.cname, env.tmp_path)

    # Assert: the singles metadata object exists and carries the overrides.
    # Use next(..., None) so a missing key fails with a clear assertion
    # message instead of an opaque StopIteration.
    bucket = env.s3.Bucket(env.bucket_name)
    meta_key = f"meta/singles/{env.cname}"
    meta_obj = next((o for o in bucket.objects.all() if o.key == meta_key), None)
    assert meta_obj is not None, f"metadata object {meta_key!r} was not uploaded"
    body = meta_obj.get()["Body"].read().decode()
    metadata = yaml.safe_load(body)
    assert metadata["require_uefi"] is False
    assert metadata["secureboot"] is True
0 commit comments