Skip to content

Commit 5e57d26

Browse files
committed
build: automatically remove unused ImageBuilders
Instead of running an external service remove outdated ImageBuilders after every build. This could be improved by only checking this once a day or so but for now it should fix storage issues where ImageBuilders pile up forever. Signed-off-by: Paul Spooren <mail@aparcar.org>
1 parent c397fa1 commit 5e57d26

File tree

1 file changed

+67
-29
lines changed

1 file changed

+67
-29
lines changed

asu/build.py

Lines changed: 67 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -43,12 +43,11 @@ def report_error(msg):
4343
job.save_meta()
4444

4545
log.debug(f"Building {req}")
46-
cache = (
47-
req.get("cache_path", Path.cwd()) / "cache" / req["version"] / req["target"]
48-
).parent
4946
target, subtarget = req["target"].split("/")
50-
sums_file = Path(cache / f"{subtarget}_sums")
51-
sig_file = Path(cache / f"{subtarget}_sums.sig")
47+
cache = req.get("cache_path", Path.cwd()) / "cache" / req["version"]
48+
cache_workdir = cache / target / subtarget
49+
sums_file = Path(cache / target / f"{subtarget}_sums")
50+
sig_file = Path(cache / target / f"{subtarget}_sums.sig")
5251

5352
def setup_ib():
5453
"""Setup ImageBuilder based on `req`
@@ -58,8 +57,8 @@ def setup_ib():
5857
upstream.
5958
"""
6059
log.debug("Setting up ImageBuilder")
61-
if (cache / subtarget).is_dir():
62-
rmtree(cache / subtarget)
60+
if (cache_workdir).is_dir():
61+
rmtree(cache_workdir)
6362

6463
download_file("sha256sums.sig", sig_file)
6564
download_file("sha256sums", sums_file)
@@ -85,33 +84,33 @@ def setup_ib():
8584

8685
download_file(ib_archive)
8786

88-
if ib_hash != get_file_hash(cache / ib_archive):
87+
if ib_hash != get_file_hash(cache / target / ib_archive):
8988
report_error("Bad Checksum")
9089

91-
(cache / subtarget).mkdir(parents=True, exist_ok=True)
90+
(cache_workdir).mkdir(parents=True, exist_ok=True)
9291

9392
job.meta["imagebuilder_status"] = "unpack_imagebuilder"
9493
job.save_meta()
9594

9695
extract_archive = subprocess.run(
9796
["tar", "--strip-components=1", "-xf", ib_archive, "-C", subtarget],
98-
cwd=cache,
97+
cwd=cache / target,
9998
)
10099

101100
if extract_archive.returncode:
102101
report_error("Failed to unpack ImageBuilder archive")
103102

104103
log.debug(f"Extracted TAR {ib_archive}")
105104

106-
(cache / ib_archive).unlink()
105+
(cache / target / ib_archive).unlink()
107106

108107
for key in req["branch_data"].get("extra_keys", []):
109108
fingerprint = fingerprint_pubkey_usign(key)
110-
(cache / subtarget / "keys" / fingerprint).write_text(
109+
(cache_workdir / "keys" / fingerprint).write_text(
111110
f"untrusted comment: ASU extra key {fingerprint}\n{key}"
112111
)
113112

114-
repos_path = cache / subtarget / "repositories.conf"
113+
repos_path = cache_workdir / "repositories.conf"
115114
repos = repos_path.read_text()
116115

117116
extra_repos = req["branch_data"].get("extra_repos")
@@ -123,18 +122,15 @@ def setup_ib():
123122
repos_path.write_text(repos)
124123
log.debug(f"Repos:\n{repos}")
125124

126-
# backup original configuration to keep default filesystems
127-
copyfile(cache / subtarget / ".config", cache / subtarget / ".config.orig")
128-
129125
if (Path.cwd() / "seckey").exists():
130126
# link key-build to imagebuilder
131-
(cache / subtarget / "key-build").symlink_to(Path.cwd() / "seckey")
127+
(cache_workdir / "key-build").symlink_to(Path.cwd() / "seckey")
132128
if (Path.cwd() / "pubkey").exists():
133129
# link key-build.pub to imagebuilder
134-
(cache / subtarget / "key-build.pub").symlink_to(Path.cwd() / "pubkey")
130+
(cache_workdir / "key-build.pub").symlink_to(Path.cwd() / "pubkey")
135131
if (Path.cwd() / "newcert").exists():
136132
# link key-build.ucert to imagebuilder
137-
(cache / subtarget / "key-build.ucert").symlink_to(Path.cwd() / "newcert")
133+
(cache_workdir / "key-build.ucert").symlink_to(Path.cwd() / "newcert")
138134

139135
def download_file(filename: str, dest: str = None):
140136
"""Download file from upstream target path
@@ -157,12 +153,12 @@ def download_file(filename: str, dest: str = None):
157153
+ filename
158154
)
159155

160-
with open(dest or (cache / filename), "wb") as f:
156+
with open(dest or (cache / target / filename), "wb") as f:
161157
f.write(r.content)
162158

163-
cache.mkdir(parents=True, exist_ok=True)
159+
(cache / target).mkdir(parents=True, exist_ok=True)
164160

165-
stamp_file = cache / f"{subtarget}_stamp"
161+
stamp_file = cache / target / f"{subtarget}_stamp"
166162

167163
sig_file_headers = requests.head(
168164
req["upstream_url"]
@@ -187,10 +183,17 @@ def download_file(filename: str, dest: str = None):
187183
log.debug("New ImageBuilder upstream available")
188184
setup_ib()
189185

186+
if not (cache_workdir / ".config.orig").exists():
187+
# backup original configuration to keep default filesystems
188+
copyfile(
189+
cache_workdir / ".config",
190+
cache_workdir / ".config.orig",
191+
)
192+
190193
stamp_file.write_text(origin_modified)
191194

192195
info_run = subprocess.run(
193-
["make", "info"], text=True, capture_output=True, cwd=cache / subtarget
196+
["make", "info"], text=True, capture_output=True, cwd=cache_workdir
194197
)
195198

196199
version_code = re.search('Current Revision: "(r.+)"', info_run.stdout).group(1)
@@ -230,17 +233,19 @@ def download_file(filename: str, dest: str = None):
230233
"STRIP_ABI=1",
231234
],
232235
text=True,
233-
cwd=cache / subtarget,
236+
cwd=cache_workdir,
234237
capture_output=True,
235238
)
236239

237240
if manifest_run.returncode:
238241
if "Package size mismatch" in manifest_run.stderr:
239-
rmtree(cache / subtarget)
242+
rmtree(cache_workdir)
240243
return build(req)
241244
else:
242245
job.meta["stdout"] = manifest_run.stdout
243246
job.meta["stderr"] = manifest_run.stderr
247+
print(manifest_run.stdout)
248+
print(manifest_run.stderr)
244249
report_error("Impossible package selection")
245250

246251
manifest = dict(map(lambda pv: pv.split(" - "), manifest_run.stdout.splitlines()))
@@ -267,7 +272,7 @@ def download_file(filename: str, dest: str = None):
267272
log.debug("Created store path: %s", req["store_path"] / bin_dir)
268273

269274
if "filesystem" in req:
270-
config_path = cache / subtarget / ".config"
275+
config_path = cache_workdir / ".config"
271276
config = config_path.read_text()
272277

273278
for filesystem in ["squashfs", "ext4fs", "ubifs", "jffs2"]:
@@ -288,7 +293,10 @@ def download_file(filename: str, dest: str = None):
288293

289294
config_path.write_text(config)
290295
else:
291-
copyfile(cache / subtarget / ".config.orig", cache / subtarget / ".config")
296+
copyfile(
297+
cache_workdir / ".config.orig",
298+
cache_workdir / ".config",
299+
)
292300

293301
build_cmd = [
294302
"make",
@@ -317,7 +325,7 @@ def download_file(filename: str, dest: str = None):
317325
image_build = subprocess.run(
318326
build_cmd,
319327
text=True,
320-
cwd=cache / subtarget,
328+
cwd=cache_workdir,
321329
capture_output=True,
322330
)
323331

@@ -342,6 +350,8 @@ def download_file(filename: str, dest: str = None):
342350
if req["profile"] not in json_content["profiles"]:
343351
report_error("Profile not found in JSON file")
344352

353+
now_timestamp = int(datetime.now().timestamp())
354+
345355
json_content.update({"manifest": manifest})
346356
json_content.update(json_content["profiles"][req["profile"]])
347357
json_content["id"] = req["profile"]
@@ -352,6 +362,8 @@ def download_file(filename: str, dest: str = None):
352362
).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
353363
json_content["detail"] = "done"
354364

365+
log.debug("JSON content %s", json_content)
366+
355367
job.connection.sadd(f"builds:{version_code}:{req['target']}", req["request_hash"])
356368

357369
job.connection.hincrby(
@@ -361,6 +373,32 @@ def download_file(filename: str, dest: str = None):
361373
),
362374
)
363375

364-
log.debug("JSON content %s", json_content)
376+
# Set last build timestamp for current target/subtarget to now
377+
job.connection.hset(
378+
f"worker:{job.worker_name}:last_build", req["target"], now_timestamp
379+
)
380+
381+
# Iterate over all targets/subtargets of the worker and remove the ones inactive for a week
382+
for target_subtarget, last_build_timestamp in job.connection.hgetall(
383+
f"worker:{job.worker_name}:last_build"
384+
).items():
385+
target_subtarget = target_subtarget.decode()
386+
387+
log.debug("now_timestamp %s %s", target_subtarget, now_timestamp)
388+
log.debug(
389+
"last_build_timestamp %s %s",
390+
target_subtarget,
391+
last_build_timestamp.decode(),
392+
)
393+
394+
if now_timestamp - int(last_build_timestamp.decode()) > 60 * 60 * 24 * 7:
395+
log.info("Removing unused ImageBuilder for %s", target_subtarget)
396+
job.connection.hdel(
397+
f"worker:{job.worker_name}:last_build", target_subtarget
398+
)
399+
if cache / target_subtarget:
400+
rmtree(cache / target_subtarget)
401+
else:
402+
log.debug("Keeping ImageBuilder for %s", target_subtarget)
365403

366404
return json_content

0 commit comments

Comments
 (0)