6 changes: 0 additions & 6 deletions src/gardenlinux/s3/__main__.py
@@ -6,12 +6,6 @@
"""

import argparse
import os
import re
import sys
from functools import reduce
from pathlib import Path
from typing import Any, List, Set

from .s3_artifacts import S3Artifacts

25 changes: 16 additions & 9 deletions src/gardenlinux/s3/bucket.py
@@ -56,7 +56,7 @@ def __init__(
if endpoint_url is not None:
s3_resource_config["endpoint_url"] = endpoint_url

self._s3_resource = boto3.resource("s3", **s3_resource_config)
self._s3_resource: Any = boto3.resource("s3", **s3_resource_config)

self._bucket = self._s3_resource.Bucket(bucket_name)
self._logger = logger
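
The new `Any` annotation works around boto3 building its resource classes dynamically at runtime, which static type checkers cannot resolve without stub packages such as boto3-stubs. A minimal sketch of the same pattern (the bucket name is hypothetical):

from typing import Any

import boto3

# boto3 constructs resource and collection classes from service models
# at runtime, so attributes like .Bucket are invisible to mypy without
# stubs; annotating the resource as Any is a pragmatic escape hatch.
s3: Any = boto3.resource("s3")
bucket = s3.Bucket("example-bucket")  # hypothetical name; no attr-defined error
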
@@ -116,7 +116,9 @@ def download_fileobj(self, key, fp, *args, **kwargs):

self._logger.info(f"Downloaded {key} from S3 as binary data")

def read_cache_file_or_filter(self, cache_file, cache_ttl: int = 3600, **kwargs):
def read_cache_file_or_filter(
self, cache_file: str | PathLike[str] | None, cache_ttl: int = 3600, **kwargs
):
"""
Read S3 object keys from cache if valid or filter for S3 object keys.

@@ -128,19 +130,24 @@ def read_cache_file_or_filter(self, cache_file, cache_ttl: int = 3600, **kwargs)
:since: 0.9.0
"""

if not isinstance(cache_file, PathLike):
cache_file = Path(cache_file)
if cache_file is not None:
cache_path = Path(cache_file)

if cache_file.exists() and (time() - cache_file.stat().st_mtime) < cache_ttl:
with cache_file.open("r") as fp:
return json.loads(fp.read())
if (
cache_path.exists()
and (time() - cache_path.stat().st_mtime) < cache_ttl
):
with cache_path.open("r") as fp:
return json.loads(fp.read())
else:
cache_path = None

artifacts = [
s3_object.key for s3_object in self._bucket.objects.filter(**kwargs).all()
]

if cache_file is not None:
with cache_file.open("w") as fp:
if cache_path is not None:
with cache_path.open("w") as fp:
fp.write(json.dumps(artifacts))

return artifacts
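
The reworked method treats cache_file=None as "no caching": it skips both the cache read and the cache write and always queries S3. A usage sketch (bucket name, cache path, and filter prefix are hypothetical):

from gardenlinux.s3.bucket import Bucket

bucket = Bucket("gardenlinux-artifacts")  # hypothetical bucket

# First call lists keys matching the filter and writes them to the
# cache file; subsequent calls within cache_ttl seconds reuse the
# cached JSON list instead of hitting S3 again.
keys = bucket.read_cache_file_or_filter(
    "/tmp/keys.json", cache_ttl=3600, Prefix="objects/"
)

# Passing None bypasses the cache entirely.
keys = bucket.read_cache_file_or_filter(None, Prefix="objects/")
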
41 changes: 25 additions & 16 deletions src/gardenlinux/s3/s3_artifacts.py
@@ -18,7 +18,6 @@
import yaml

from ..features.cname import CName
from ..logger import LoggerSetup
from .bucket import Bucket


@@ -53,7 +52,7 @@ def __init__(
:since: 0.8.0
"""

self._bucket = Bucket(bucket_name, endpoint_url, s3_resource_config)
self._bucket = Bucket(bucket_name, endpoint_url, s3_resource_config, logger)

@property
def bucket(self):
@@ -68,8 +67,8 @@ def bucket(self):

def download_to_directory(
self,
cname,
artifacts_dir,
cname: str,
artifacts_dir: str | PathLike[str],
):
"""
Download S3 artifacts to a given directory.
@@ -80,8 +79,7 @@
:since: 0.8.0
"""

if not isinstance(artifacts_dir, PathLike):
artifacts_dir = Path(artifacts_dir)
artifacts_dir = Path(artifacts_dir)

if not artifacts_dir.is_dir():
raise RuntimeError(f"Artifacts directory given is invalid: {artifacts_dir}")
@@ -101,8 +99,8 @@

def upload_from_directory(
self,
cname,
artifacts_dir,
cname: str,
artifacts_dir: str | PathLike[str],
delete_before_push=False,
):
"""
@@ -115,8 +113,7 @@
:since: 0.8.0
"""

if not isinstance(artifacts_dir, PathLike):
artifacts_dir = Path(artifacts_dir)
artifacts_dir = Path(artifacts_dir)

cname_object = CName(cname)

@@ -192,22 +189,34 @@ def upload_from_directory(
"paths": [],
}

# Catch any invalid artifacts
bad_files = [
f
for f in artifacts_dir.iterdir()
if not f.name.startswith(cname)
and f.suffix not in [".release", ".requirements"]
]
if bad_files:
raise RuntimeError(
f"Artifact name '{bad_files[0].name}' does not start with cname '{cname}'"
)

for artifact in artifacts_dir.iterdir():
if not artifact.match(f"{cname}*"):
continue

if not artifact.name.startswith(cname):
raise RuntimeError(
f"Artifact name '{artifact.name}' does not start with cname '{cname}'"
)

s3_key = f"objects/{cname}/{artifact.name}"

with artifact.open("rb") as fp:
md5sum = file_digest(fp, "md5").hexdigest()
sha256sum = file_digest(fp, "sha256").hexdigest()

if artifact.name.startswith(cname):
suffix = artifact.name[len(cname) :]
else:
raise RuntimeError(
f"Artifact name '{artifact.name}' does not start with cname '{cname}'"
)
suffix = artifact.name[len(cname) :]

artifact_metadata = {
"name": artifact.name,
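
The moved check also fixes a latent dead spot: the old in-loop startswith test could never fire, because the loop already skips any file that fails the {cname}* glob match, so stray files were silently ignored rather than rejected. With the pre-loop scan the upload now fails fast. A sketch of the resulting behavior (bucket name, cname, and directory contents are hypothetical):

from pathlib import Path

from gardenlinux.s3.s3_artifacts import S3Artifacts

artifacts = S3Artifacts("gardenlinux-artifacts")  # hypothetical bucket
artifacts_dir = Path("/tmp/artifacts")  # valid release metadata plus a stray "notes.txt"

# The stray file neither starts with the cname nor has a .release or
# .requirements suffix, so the upload now aborts before pushing anything:
#   RuntimeError: Artifact name 'notes.txt' does not start with cname '...'
artifacts.upload_from_directory("aws-gardener_prod-amd64-1592.3-abc123", artifacts_dir)
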
12 changes: 9 additions & 3 deletions tests/s3/test_bucket.py
@@ -8,15 +8,21 @@
"""

import io
from pathlib import Path

import pytest

from gardenlinux.s3.bucket import Bucket

REGION = "us-east-1"


def test_bucket_minimal(s3_setup):
"""
Ensure Bucket initializes correctly.
"""
env = s3_setup
bucket = Bucket(env.bucket_name)
assert bucket.name == env.bucket_name


def test_objects_empty(s3_setup):
"""
List objects from empty bucket.
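
The new test_bucket_minimal relies on the shared s3_setup fixture. A sketch of what such a fixture might provide, using moto (the fixture shape is an assumption, not the repository's actual conftest):

import boto3
import pytest
from moto import mock_aws  # moto >= 5; older releases expose mock_s3 instead


@pytest.fixture
def s3_setup(tmp_path):
    """Yield an object carrying a mocked bucket name and a temp directory."""
    with mock_aws():
        bucket_name = "test-bucket"  # hypothetical name
        boto3.client("s3", region_name="us-east-1").create_bucket(Bucket=bucket_name)
        env = type("Env", (), {})()
        env.bucket_name = bucket_name
        env.tmp_path = tmp_path
        yield env
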
54 changes: 54 additions & 0 deletions tests/s3/test_s3_artifacts.py
@@ -202,6 +202,58 @@ def test_upload_from_directory_version_mismatch_raises(s3_setup):
artifacts.upload_from_directory(env.cname, env.tmp_path)


def test_upload_from_directory_none_version_raises(monkeypatch, s3_setup):
"""
Raise RuntimeError if CName.version is None.
"""
# Arrange
env = s3_setup
(env.tmp_path / f"{env.cname}.release").write_text(RELEASE_DATA)

import gardenlinux.s3.s3_artifacts as s3art

# Monkeypatch CName to force CName.version to be None and bypass
# the class's internal validation checks
class DummyCName:
arch = "amd64"
version = None
commit_id = "abc123"
platform = "aws"

def __init__(self, cname):
pass

monkeypatch.setattr(s3art, "CName", DummyCName)

artifacts = s3art.S3Artifacts(env.bucket_name)

# Act / Assert
with pytest.raises(
RuntimeError,
match="Release file data and given cname conflict detected: Version None",
):
artifacts.upload_from_directory(env.cname, env.tmp_path)


def test_upload_from_directory_invalid_artifact_name(s3_setup):
"""
Raise RuntimeError if artifact file does not start with cname.
"""
# Arrange
env = s3_setup
(env.tmp_path / f"{env.cname}.release").write_text(RELEASE_DATA)

# Create "bad" artifact that does not start with cname
bad_file = env.tmp_path / "no_match"
bad_file.write_bytes(b"oops")

artifacts = S3Artifacts(env.bucket_name)

# Act / Assert
with pytest.raises(RuntimeError, match="does not start with cname"):
artifacts.upload_from_directory(env.cname, env.tmp_path)


def test_upload_from_directory_commit_mismatch_raises(s3_setup):
"""Raise RuntimeError when commit ID is not matching with cname."""
# Arrange
@@ -210,6 +262,8 @@ def test_upload_from_directory_commit_mismatch_raises(s3_setup):
bad_data = RELEASE_DATA.replace("abc123", "wrong")
release_path.write_text(bad_data)
artifacts = S3Artifacts(env.bucket_name)

# Act / Assert
with pytest.raises(RuntimeError, match="Commit ID"):
artifacts.upload_from_directory(env.cname, env.tmp_path)
