Commit 954ccb3

gc: merge container-prune into cloud-prune
Merge the container GC code into the cloud GC and update builds.json. Go through the tags in the base-oscontainer data of meta.json and prune every tag except the stream name itself, which is a moving tag.
1 parent 48fba72 commit 954ccb3
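A minimal sketch of the tag selection described above, assuming a hypothetical meta.json with a base-oscontainer section (the tag and version values here are illustrative, not from the commit):

# Hypothetical example: tags recorded in meta.json under "base-oscontainer"
# for a build on the "stable" stream.
meta_json = {"base-oscontainer": {"tags": ["stable", "41.20250101.3.0"]}}
stream = "stable"

tags = meta_json.get("base-oscontainer", {}).get("tags", [])
# The tag named after the stream is a moving tag and is kept;
# everything else is a candidate for pruning.
tags_to_prune = [tag for tag in tags if tag != stream]
print(tags_to_prune)  # ['41.20250101.3.0']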

3 files changed: +91 additions, −127 deletions
cmd/coreos-assembler.go

Lines changed: 1 addition & 1 deletion

@@ -16,7 +16,7 @@ var buildCommands = []string{"init", "fetch", "build", "run", "prune", "clean",
 var advancedBuildCommands = []string{"buildfetch", "buildupload", "oc-adm-release", "push-container"}
 var buildextendCommands = []string{"aliyun", "applehv", "aws", "azure", "digitalocean", "exoscale", "extensions-container", "gcp", "hashlist-experimental", "hyperv", "ibmcloud", "kubevirt", "live", "metal", "metal4k", "nutanix", "openstack", "qemu", "secex", "virtualbox", "vmware", "vultr"}
 
-var utilityCommands = []string{"aws-replicate", "cloud-prune", "compress", "container-prune", "copy-container", "koji-upload", "kola", "push-container-manifest", "remote-build-container", "remote-session", "sign", "tag", "update-variant"}
+var utilityCommands = []string{"aws-replicate", "cloud-prune", "compress", "copy-container", "koji-upload", "kola", "push-container-manifest", "remote-build-container", "remote-session", "sign", "tag", "update-variant"}
 var otherCommands = []string{"shell", "meta"}
 
 func init() {

src/cmd-cloud-prune

Lines changed: 90 additions & 1 deletion

@@ -34,8 +34,10 @@
 
 import argparse
 import json
+import subprocess
 from urllib.parse import urlparse
 import pytz
+import requests
 import yaml
 import collections
 import datetime
@@ -59,6 +61,8 @@ CACHE_MAX_AGE_METADATA = 60 * 5
 # is up to date.
 SUPPORTED = ["amis", "gcp"]
 UNSUPPORTED = ["aliyun", "azure", "ibmcloud", "powervs"]
+# list of known streams with containers
+STREAMS = {"next", "testing", "stable", "next-devel", "testing-devel", "rawhide", "branched"}
 
 
 def parse_args():
@@ -70,6 +74,9 @@ def parse_args():
     parser.add_argument("--gcp-json-key", help="GCP Service Account JSON Auth", default=os.environ.get("GCP_JSON_AUTH"))
     parser.add_argument("--acl", help="ACL for objects", action='store', default='private')
     parser.add_argument("--aws-config-file", default=os.environ.get("AWS_CONFIG_FILE"), help="Path to AWS config file")
+    parser.add_argument("--repository-url", help="container images URL")
+    parser.add_argument("--registry-auth-file", default=os.environ.get("REGISTRY_AUTH_FILE"),
+                        help="Path to docker registry auth file. Directly passed to skopeo.")
     return parser.parse_args()
 
 
@@ -125,7 +132,7 @@ def main():
             current_build = Build(id=build_id, images=images, arch=arch, meta_json=meta_json)
 
             # Iterate over actions (policy types) to apply pruning
-            for action in ['cloud-uploads', 'images', 'build']:
+            for action in ['cloud-uploads', 'images', 'build', 'containers']:
                 if action not in policy[stream]:
                     continue
                 action_duration = convert_duration_to_days(policy[stream][action])
@@ -162,6 +169,19 @@ def main():
                     case "build":
                         prune_build(s3_client, bucket, prefix, build_id, args.dry_run)
                         pruned_build_ids.append(build_id)
+                    case "containers":
+                        container_tags = get_container_tags(meta_json, stream)
+                        if container_tags:
+                            containers_config = {
+                                "container_tags": container_tags,
+                                "dry_run": args.dry_run,
+                                "repository_url": args.repository_url,
+                                "registry_auth_file": args.registry_auth_file,
+                                "stream": stream
+                            }
+                            prune_containers(containers_config)
+                        else:
+                            print(f"No container tags to prune for build {build_id} on architecture {arch}.")
 
                 # Update policy-cleanup after pruning actions for the architecture
                 policy_cleanup = build.setdefault("policy-cleanup", {})
@@ -174,6 +194,9 @@ def main():
                         if "images" not in policy_cleanup:
                             policy_cleanup["images"] = True
                             policy_cleanup["images-kept"] = images_to_keep
+                    case "containers":
+                        if "containers" not in policy_cleanup:
+                            policy_cleanup["containers"] = True
 
     if pruned_build_ids:
         if "tombstone-builds" not in builds_json_data:
@@ -414,5 +437,71 @@ def prune_build(s3_client, bucket, prefix, build_id, dry_run):
         raise Exception(f"Error pruning {build_id}: {e.response['Error']['Message']}")
 
 
+def get_container_tags(meta_json, stream):
+    container_tags = []
+    base_oscontainer = meta_json.get("base-oscontainer")
+    if base_oscontainer:
+        tags = base_oscontainer.get("tags", [])
+        # Only include tags that do not match the stream, i.e. the moving tags
+        filtered_tags = [tag for tag in tags if tag != stream]
+        if filtered_tags:
+            container_tags = filtered_tags
+    return container_tags
+
+
+def prune_containers(containers_config):
+    barrier_releases = set()
+    # Get the update graph for stable streams
+    if containers_config["stream"] in ['stable', 'testing', 'next']:
+        update_graph = get_update_graph(containers_config["stream"])['releases']
+        # Keep only the barrier releases
+        barrier_releases = set([release["version"] for release in update_graph if "barrier" in release])
+
+    for tag in containers_config["container_tags"]:
+        if tag in STREAMS:
+            continue
+        if tag in barrier_releases:
+            print(f"Release {tag} is a barrier release, keeping.")
+            continue
+        if containers_config["dry_run"]:
+            print(f"Would prune image {containers_config["repository_url"]}:{tag}")
+        else:
+            skopeo_delete(containers_config["repository_url"], tag, containers_config["registry_auth_file"])
+
+
+def get_update_graph(stream):
+    url = f"https://builds.coreos.fedoraproject.org/updates/{stream}.json"
+    r = requests.get(url, timeout=5)
+    if r.status_code != 200:
+        raise Exception(f"Could not download update graph for {stream}. HTTP {r.status_code}")
+    return r.json()
+
+
+def skopeo_inspect(repo, image, auth):
+    skopeo_args = ["skopeo", "inspect", f"docker://{repo}:{image}"]
+    if auth:
+        skopeo_args.extend(["--authfile", auth])
+    try:
+        subprocess.check_output(skopeo_args, stderr=subprocess.STDOUT)
+        return True  # Inspection succeeded
+    except subprocess.CalledProcessError as e:
+        print("Inspection failed:", e.output.decode("utf-8"))
+        return False  # Inspection failed
+
+
+def skopeo_delete(repo, image, auth):
+    if skopeo_inspect(repo, image, auth):  # Only proceed if inspection succeeds
+        skopeo_args = ["skopeo", "delete", f"docker://{repo}:{image}"]
+        if auth:
+            skopeo_args.extend(["--authfile", auth])
+        try:
+            subprocess.check_output(skopeo_args, stderr=subprocess.STDOUT)
+            print("Image deleted successfully.")
+        except subprocess.CalledProcessError as e:
+            raise Exception("An error occurred during deletion:", e.output.decode("utf-8"))
+    else:
+        raise Exception("Skipping delete as skopeo inspection failed.")
+
+
 if __name__ == "__main__":
     main()
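
For illustration only, a hedged usage sketch of the prune_containers() helper added above. It assumes it runs inside the script's own namespace (so STREAMS and the new functions are in scope); the repository URL, auth file path, and tag values are hypothetical, and dry_run=True only prints what would be removed:

# Hypothetical usage of the helper added in this commit (values are examples only).
containers_config = {
    "container_tags": ["41.20250101.3.0", "stable"],    # hypothetical tags; "stable" shows the moving-tag skip
    "dry_run": True,                                     # print instead of deleting
    "repository_url": "quay.io/fedora/fedora-coreos",    # assumed example repository
    "registry_auth_file": "/path/to/auth.json",          # forwarded to skopeo via --authfile
    "stream": "stable",
}
prune_containers(containers_config)
# The "stable" tag is skipped (it is in STREAMS); barrier releases from the
# update graph are kept; with dry_run=True the rest are only printed, e.g.:
#   Would prune image quay.io/fedora/fedora-coreos:41.20250101.3.0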

src/cmd-container-prune

Lines changed: 0 additions & 125 deletions
This file was deleted.
