@@ -105,6 +105,8 @@ def main():
 
     with open(BUILDFILES['list'], "r") as f:
         builds_json_data = json.load(f)
+    # Original list of builds
+    builds = builds_json_data["builds"]
 
     # Prune builds based on the policy
     for action in ['cloud-uploads', 'images', 'build']:
@@ -115,7 +117,7 @@ def main():
 
         print(f"Pruning resources of type {action} older than {policy[stream][action]} ({ref_date.date()}) on stream {stream}")
         # Enumerating in reverse to go from the oldest build to the newest one
-        for index, build in enumerate(reversed(builds_json_data["builds"])):
+        for build in reversed(builds):
             build_id = build["id"]
             if action in build.get("policy-cleanup", []):
                 print(f"Build {build_id} has already had {action} pruning completed")
@@ -141,13 +143,10 @@ def main():
                         # S3().delete_object(args.bucket, f"{args.prefix}{str(current_build.id)}")
                     case "images":
                         raise NotImplementedError
-            if not args.dry_run:
-                build.setdefault("policy-cleanup", []).append("cloud-uploads")
-                builds_json_data["builds"][index] = build
+            build.setdefault("policy-cleanup", []).append("cloud-uploads")
 
-    if not args.dry_run:
-        # Save the updated builds.json to local builds/builds.json
-        save_builds_json(builds_json_data)
+    # Save the updated builds.json to local builds/builds.json
+    save_builds_json(builds_json_data, BUILDFILES['list'])
 
 
 def get_s3_bucket_and_prefix(builds_source_data_url):
@@ -201,9 +200,9 @@ def get_json_from_s3(s3, bucket, key):
         raise Exception(f"Error fetching the JSON file from S3 {bucket}/{key}: {e}")
 
 
-def save_builds_json(builds_json_data):
+def save_builds_json(builds_json_data, location):
     builds_json_data["timestamp"] = datetime.datetime.now(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-    with open(BUILDFILES['list'], "w") as json_file:
+    with open(location, "w") as json_file:
         json.dump(builds_json_data, json_file, indent=2)
 
 
@@ -225,11 +224,23 @@ def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):
     # Check if there are any changes that were made to remote(s3 version) builds.json
     # while the pruning was in progress
     if remote_builds_json != builds_json_source_data:
+        # Before we merge the changes, let's update the local tmp/builds-source.json with the latest remote_builds_json
+        save_builds_json(remote_builds_json, BUILDFILES['sourcedata'])
         print("Detected remote updates to builds.json. Merging it to the local builds.json file")
         remote_builds_json = update_policy_cleanup(current_builds_json, remote_builds_json)
-        if not dry_run:
-            # Make sure we have the merged json as local builds/builds.json
-            save_builds_json(remote_builds_json)
+        # Make sure we have the merged json as local builds/builds.json
+        save_builds_json(remote_builds_json, BUILDFILES['list'])
+
+    # Print the updated builds.json before the s3 update
+    with open(BUILDFILES['list'], 'r') as file:
+        data = json.load(file)
+    print("----")
+    print(json.dumps(data, indent=4))
+    print("----")
+
+    # Before uploading builds.json, copy the updated tmp/builds-source.json as builds.json.bak as a backup
+    s3_copy(s3_client, BUILDFILES['sourcedata'], bucket, f'{prefix}/builds.json.bak', CACHE_MAX_AGE_METADATA, acl, extra_args={}, dry_run=dry_run)
+
     # Upload the local builds.json to s3
     return s3_copy(s3_client, BUILDFILES['list'], bucket, f'{prefix}/builds.json', CACHE_MAX_AGE_METADATA, acl, extra_args={}, dry_run=dry_run)
 
@@ -262,7 +273,11 @@ def prune_cloud_uploads(build, cloud_config, dry_run):
 def deregister_aws_amis(build, cloud_config, dry_run):
     errors = []
     aws_credentials = cloud_config.get("aws", {}).get("credentials")
-    for ami in build.images.get("amis", []):
+    amis = build.images.get("amis")
+    if not amis:
+        print(f"No AMI/Snapshot to prune for {build.id} for {build.arch}")
+        return errors
+    for ami in amis:
         region_name = ami.get("name")
         ami_id = ami.get("hvm")
         snapshot_id = ami.get("snapshot")
@@ -283,7 +298,7 @@ def delete_gcp_image(build, cloud_config, dry_run):
     errors = []
     gcp = build.images.get("gcp")
     if not gcp:
-        print(f"No GCP image for {build.id} for {build.arch}")
+        print(f"No GCP image to prune for {build.id} for {build.arch}")
         return errors
     gcp_image = gcp.get("image")
     project = gcp.get("project")