Commit dbe36c4

Integrate S3ObjectIndex functionality into gardenlinux.s3.Bucket

Signed-off-by: Tobias Wolf <[email protected]>

Parent: 7e2f553

5 files changed: +71 additions, -145 deletions

src/gardenlinux/s3/__init__.py

Lines changed: 1 addition & 2 deletions

@@ -6,6 +6,5 @@
 
 from .bucket import Bucket
 from .s3_artifacts import S3Artifacts
-from .s3_object_index import S3ObjectIndex
 
-__all__ = ["Bucket", "S3Artifacts", "S3ObjectIndex"]
+__all__ = ["Bucket", "S3Artifacts"]

src/gardenlinux/s3/bucket.py

Lines changed: 35 additions & 1 deletion

@@ -4,10 +4,15 @@
 S3 bucket
 """
 
-import boto3
+import json
 import logging
+from os import PathLike
+from pathlib import Path
+from time import time
 from typing import Any, Optional
 
+import boto3
+
 from ..logger import LoggerSetup
 
 
@@ -111,6 +116,35 @@ def download_fileobj(self, key, fp, *args, **kwargs):
 
         self._logger.info(f"Downloaded {key} from S3 as binary data")
 
+    def read_cache_file_or_filter(self, cache_file, cache_ttl: int = 3600, **kwargs):
+        """
+        Read S3 object keys from cache if valid or filter for S3 object keys.
+
+        :param cache_file: Path to cache file
+        :param cache_ttl: Cache time-to-live in seconds
+
+        :returns: S3 object keys read or filtered
+
+        :since: 0.9.0
+        """
+
+        if not isinstance(cache_file, PathLike):
+            cache_file = Path(cache_file)
+
+        if cache_file.exists() and (time() - cache_file.stat().st_mtime) < cache_ttl:
+            with cache_file.open("r") as fp:
+                return json.loads(fp.read())
+
+        artifacts = [
+            s3_object.key for s3_object in self._bucket.objects.filter(**kwargs).all()
+        ]
+
+        if cache_file is not None:
+            with cache_file.open("w") as fp:
+                fp.write(json.dumps(artifacts))
+
+        return artifacts
+
     def upload_file(self, file_name, key, *args, **kwargs):
         """
         boto3: Upload a file to an S3 object.
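
For orientation, a minimal usage sketch of the new Bucket.read_cache_file_or_filter method, based on the signature above and the test added below. The bucket name, region, cache path, and prefix are hypothetical; the Prefix keyword is simply forwarded to boto3's objects.filter().

# Minimal sketch (hypothetical names): list object keys under a prefix and
# cache the result on disk, reusing the cache while it is younger than an hour.
from gardenlinux.s3 import Bucket

bucket = Bucket("my-bucket", s3_resource_config={"region_name": "eu-central-1"})
keys = bucket.read_cache_file_or_filter(
    "/tmp/s3-keys.cache.json",  # str paths are converted to pathlib.Path
    cache_ttl=3600,             # seconds before the cache file counts as stale
    Prefix="meta/",             # forwarded to boto3's objects.filter(...)
)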

src/gardenlinux/s3/s3_artifacts.py

Lines changed: 12 additions & 2 deletions

@@ -16,11 +16,10 @@
 from typing import Any, Optional
 from urllib.parse import urlencode
 
+from .bucket import Bucket
 from ..features.cname import CName
 from ..logger import LoggerSetup
 
-from .bucket import Bucket
-
 
 class S3Artifacts(object):
     """
@@ -55,6 +54,17 @@ def __init__(
 
         self._bucket = Bucket(bucket_name, endpoint_url, s3_resource_config)
 
+    @property
+    def bucket(self):
+        """
+        Returns the underlying S3 bucket.
+
+        :return: (boto3.Bucket) S3 bucket
+        :since: 0.9.0
+        """
+
+        return self._bucket
+
     def download_to_directory(
         self,
         cname,
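
A follow-up sketch of the integration named in the commit message: callers that previously used S3ObjectIndex can reach the same cached key listing through the Bucket exposed by the new S3Artifacts.bucket property. This assumes S3Artifacts takes the bucket name as its first argument, which is not shown in the hunk; the name, cache path, and prefix are hypothetical.

# Sketch assuming S3Artifacts(...) accepts the bucket name as its first
# argument (hypothetical); the bucket property returns the wrapped Bucket,
# so the cached key listing added above is reachable without S3ObjectIndex.
from gardenlinux.s3 import S3Artifacts

artifacts = S3Artifacts("gardenlinux-releases")  # hypothetical bucket name
keys = artifacts.bucket.read_cache_file_or_filter(
    "/tmp/releases.cache.json", cache_ttl=600, Prefix="objects/"
)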

src/gardenlinux/s3/s3_object_index.py

Lines changed: 0 additions & 140 deletions
This file was deleted.

tests/s3/test_bucket.py

Lines changed: 23 additions & 0 deletions

@@ -69,6 +69,29 @@ def test_download_file(s3_setup):
     assert target_path.read_text() == "some data"
 
 
+def test_read_cache_file_or_filter(s3_setup):
+    """
+    Try to read with cache
+    """
+
+    env = s3_setup
+    env.s3.Object(env.bucket_name, "file.txt").put(Body=b"some data")
+
+    bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION})
+    cache_file = env.tmp_path / "s3.cache.json"
+
+    result = bucket.read_cache_file_or_filter(cache_file, 1, Prefix="file")
+    assert result == ["file.txt"]
+
+    env.s3.Object(env.bucket_name, "file2.txt").put(Body=b"some data")
+
+    result = bucket.read_cache_file_or_filter(cache_file, 3600, Prefix="file")
+    assert result == ["file.txt"]
+
+    result = bucket.read_cache_file_or_filter(cache_file, 0, Prefix="file")
+    assert result == ["file.txt", "file2.txt"]
+
+
 def test_upload_fileobj(s3_setup):
     """
     Upload a file-like in-memory object to the bucket
