
Commit 77118ab

gursewak1997 authored and jlebon committed
cmd-cloud-prune: Fixed cloud pruning and backed up builds.json
Updated the process to preserve build order and keep all existing builds in builds.json intact. Additionally, added a step to print the updated builds.json during a dry run; the same content is what gets uploaded in non-dry-run mode. The previous version of builds.json is now also backed up to the S3 bucket as builds.json.bak.
1 parent ad60ee4 commit 77118ab
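
For orientation, here is a minimal sketch of the builds.json layout this change operates on (illustrative values; only the "builds" list and the "policy-cleanup" marker come from the diff below; builds are listed newest-first, which is why the pruning loop walks the list in reverse):

import json

# Illustrative builds.json contents; IDs and schema-version are made up.
builds_json_data = {
    "schema-version": "1.1.0",
    "builds": [
        {"id": "39.20240410.3.1", "arches": ["x86_64", "aarch64"]},
        {
            "id": "39.20240128.3.0",
            "arches": ["x86_64"],
            # marker appended by cmd-cloud-prune once an action has run
            "policy-cleanup": ["cloud-uploads"],
        },
    ],
    "timestamp": "2024-04-10T12:00:00Z",
}
print(json.dumps(builds_json_data, indent=2))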

File tree

1 file changed: +29 -14 lines changed


src/cmd-cloud-prune

Lines changed: 29 additions & 14 deletions
@@ -105,6 +105,8 @@ def main():
 
     with open(BUILDFILES['list'], "r") as f:
         builds_json_data = json.load(f)
+    # Original list of builds
+    builds = builds_json_data["builds"]
 
     # Prune builds based on the policy
     for action in ['cloud-uploads', 'images', 'build']:
@@ -115,7 +117,7 @@ def main():
 
         print(f"Pruning resources of type {action} older than {policy[stream][action]} ({ref_date.date()}) on stream {stream}")
         # Enumerating in reverse to go from the oldest build to the newest one
-        for index, build in enumerate(reversed(builds_json_data["builds"])):
+        for build in reversed(builds):
             build_id = build["id"]
             if action in build.get("policy-cleanup", []):
                 print(f"Build {build_id} has already had {action} pruning completed")
@@ -141,13 +143,10 @@ def main():
                     # S3().delete_object(args.bucket, f"{args.prefix}{str(current_build.id)}")
                 case "images":
                     raise NotImplementedError
-            if not args.dry_run:
-                build.setdefault("policy-cleanup", []).append("cloud-uploads")
-                builds_json_data["builds"][index] = build
+            build.setdefault("policy-cleanup", []).append("cloud-uploads")
 
-    if not args.dry_run:
-        # Save the updated builds.json to local builds/builds.json
-        save_builds_json(builds_json_data)
+    # Save the updated builds.json to local builds/builds.json
+    save_builds_json(builds_json_data, BUILDFILES['list'])
 
 
 def get_s3_bucket_and_prefix(builds_source_data_url):
@@ -201,9 +200,9 @@ def get_json_from_s3(s3, bucket, key):
         raise Exception(f"Error fetching the JSON file from S3 {bucket}/{key}: {e}")
 
 
-def save_builds_json(builds_json_data):
+def save_builds_json(builds_json_data, location):
     builds_json_data["timestamp"] = datetime.datetime.now(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-    with open(BUILDFILES['list'], "w") as json_file:
+    with open(location, "w") as json_file:
         json.dump(builds_json_data, json_file, indent=2)
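
Since save_builds_json now takes an explicit location, the same helper can write both the local working copy and the cached snapshot of the remote file. A minimal usage sketch, assuming (per the comments in this diff) that BUILDFILES['list'] points at builds/builds.json and BUILDFILES['sourcedata'] at tmp/builds-source.json:

import datetime
import json
import os

import pytz

def save_builds_json(builds_json_data, location):
    # Same body as the diff above: refresh the timestamp on every write
    builds_json_data["timestamp"] = datetime.datetime.now(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    with open(location, "w") as json_file:
        json.dump(builds_json_data, json_file, indent=2)

os.makedirs("builds", exist_ok=True)
os.makedirs("tmp", exist_ok=True)
data = {"builds": []}
save_builds_json(data, "builds/builds.json")      # BUILDFILES['list']: local working copy
save_builds_json(data, "tmp/builds-source.json")  # BUILDFILES['sourcedata']: remote snapshot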

@@ -225,11 +224,23 @@ def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):
     # Check if there are any changes that were made to remote(s3 version) builds.json
     # while the pruning was in progress
     if remote_builds_json != builds_json_source_data:
+        # Before we merge the changes, let's update the local tmp/builds-source.json with the latest remote_builds_json
+        save_builds_json(remote_builds_json, BUILDFILES['sourcedata'])
         print("Detected remote updates to builds.json. Merging it to the local builds.json file")
         remote_builds_json = update_policy_cleanup(current_builds_json, remote_builds_json)
-        if not dry_run:
-            # Make sure we have the merged json as local builds/builds.json
-            save_builds_json(remote_builds_json)
+        # Make sure we have the merged json as local builds/builds.json
+        save_builds_json(remote_builds_json, BUILDFILES['list'])
+
+    # Print the updated builds.json before the s3 update
+    with open(BUILDFILES['list'], 'r') as file:
+        data = json.load(file)
+    print("----")
+    print(json.dumps(data, indent=4))
+    print("----")
+
+    # Before uploading builds.json, copy the updated tmp/builds-source.json as builds.json.bak as a backup
+    s3_copy(s3_client, BUILDFILES['sourcedata'], bucket, f'{prefix}/builds.json.bak', CACHE_MAX_AGE_METADATA, acl, extra_args={}, dry_run=dry_run)
+
     # Upload the local builds.json to s3
     return s3_copy(s3_client, BUILDFILES['list'], bucket, f'{prefix}/builds.json', CACHE_MAX_AGE_METADATA, acl, extra_args={}, dry_run=dry_run)
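
The new builds.json.bak object acts as an escape hatch: if a prune run leaves a bad builds.json behind, the pre-prune version can be restored with a server-side copy. A hypothetical recovery sketch (bucket and prefix are placeholders, not values from this repo):

import boto3

bucket = "example-builds-bucket"
prefix = "prod/streams/stable"

s3 = boto3.client("s3")
# Copy the pre-prune backup back over the live builds.json
s3.copy_object(
    Bucket=bucket,
    Key=f"{prefix}/builds.json",
    CopySource={"Bucket": bucket, "Key": f"{prefix}/builds.json.bak"},
)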

@@ -262,7 +273,11 @@ def prune_cloud_uploads(build, cloud_config, dry_run):
 def deregister_aws_amis(build, cloud_config, dry_run):
     errors = []
     aws_credentials = cloud_config.get("aws", {}).get("credentials")
-    for ami in build.images.get("amis", []):
+    amis = build.images.get("amis")
+    if not amis:
+        print(f"No AMI/Snapshot to prune for {build.id} for {build.arch}")
+        return errors
+    for ami in amis:
         region_name = ami.get("name")
         ami_id = ami.get("hvm")
         snapshot_id = ami.get("snapshot")
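
For reference, what deregister_aws_amis does per AMI roughly amounts to the following plain-boto3 calls (a hypothetical equivalent with placeholder IDs; the script itself goes through its own AWS helpers):

import boto3

# Placeholders standing in for the region/AMI/snapshot fields read
# from each "amis" entry above
ec2 = boto3.client("ec2", region_name="us-east-1")
ec2.deregister_image(ImageId="ami-0123456789abcdef0")
ec2.delete_snapshot(SnapshotId="snap-0123456789abcdef0")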
@@ -283,7 +298,7 @@ def delete_gcp_image(build, cloud_config, dry_run):
     errors = []
     gcp = build.images.get("gcp")
     if not gcp:
-        print(f"No GCP image for {build.id} for {build.arch}")
+        print(f"No GCP image to prune for {build.id} for {build.arch}")
         return errors
     gcp_image = gcp.get("image")
     project = gcp.get("project")
