@@ -60,7 +60,6 @@ def parse_args():
     parser.add_argument("--upload-builds-json", help="Push builds.json", action='store_true')
     parser.add_argument("--stream", type=str, help="CoreOS stream", required=True)
     parser.add_argument("--gcp-json-key", help="GCP Service Account JSON Auth", default=os.environ.get("GCP_JSON_AUTH"))
-    parser.add_argument("--gcp-project", help="GCP Project name", default=os.environ.get("GCP_PROJECT_NAME"))
     parser.add_argument("--acl", help="ACL for objects", action='store', default='private')
     parser.add_argument("--aws-config-file", default=os.environ.get("AWS_CONFIG_FILE"), help="Path to AWS config file")
     return parser.parse_args()
@@ -98,7 +97,11 @@ def main():
 
     with open(args.policy, "r") as f:
         policy = yaml.safe_load(f)
-    validate_policy(stream, policy)
+    if stream in policy:
+        validate_policy(stream, policy)
+    else:
+        print(f"There is no policy defined in gc-policy.yaml for {stream}")
+        return
 
     with open(BUILDFILES['list'], "r") as f:
         builds_json_data = json.load(f)
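The new guard treats the policy file as a mapping keyed by stream name. A minimal sketch of that behavior, with made-up stream names and retention values rather than the real pipeline policy:

```python
import yaml

# Invented gc-policy.yaml contents, for illustration only.
policy = yaml.safe_load("""
stable:
  cloud-uploads: 2y
next:
  cloud-uploads: 1y
""")

stream = "rawhide"  # a stream with no entry in the policy
if stream in policy:
    print(f"Pruning {stream} with policy {policy[stream]}")
else:
    # Mirrors the new early return: nothing to prune for an unknown stream.
    print(f"There is no policy defined in gc-policy.yaml for {stream}")
```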
@@ -159,7 +162,6 @@ def get_cloud_config(args):
     return {
         "gcp": {
             "json-key": args.gcp_json_key,
-            "project": args.gcp_project
         },
         "aws": {
             "credentials": args.aws_config_file
@@ -207,15 +209,24 @@ def save_builds_json(builds_json_data):
 
 def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):
     remote_builds_json = get_json_from_s3(s3_client, bucket, os.path.join(prefix, "builds.json"))
+    # This is the copy of builds.json from what we last downloaded from the source
     with open(BUILDFILES['sourcedata'], "r") as f:
         builds_json_source_data = json.load(f)
+    # This is the current list of builds at builds/builds.json
+    with open(BUILDFILES['list'], "r") as f:
+        current_builds_json = json.load(f)
+
+    # If there are no changes to the local builds/builds.json we won't need to upload
+    # anything to the s3 bucket. Will return in this scenario.
+    if builds_json_source_data == current_builds_json:
+        print("There are no changes to the local builds/builds.json. No upload needed")
+        return
+
     # Check if there are any changes that were made to remote(s3 version) builds.json
     # while the pruning was in progress
     if remote_builds_json != builds_json_source_data:
         print("Detected remote updates to builds.json. Merging it to the local builds.json file")
-        with open(BUILDFILES['list'], "r") as f:
-            current_builds_json = json.load(f)
-        update_policy_cleanup(current_builds_json, remote_builds_json)
+        remote_builds_json = update_policy_cleanup(current_builds_json, remote_builds_json)
         if not dry_run:
             # Make sure we have the merged json as local builds/builds.json
             save_builds_json(remote_builds_json)
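The hunk above now compares three copies of builds.json: the snapshot taken when the command started (BUILDFILES['sourcedata']), the local list after pruning (BUILDFILES['list']), and the remote copy in S3. A minimal sketch of that decision, using plain dicts and a made-up helper name (decide_upload) in place of the real file and S3 handling:

```python
# Sketch only: source_copy/local_copy/remote_copy stand in for the three builds.json copies.
# Build ids below are invented examples.
def decide_upload(source_copy, local_copy, remote_copy):
    if source_copy == local_copy:
        # Pruning changed nothing locally, so there is nothing to push.
        return "skip"
    if remote_copy != source_copy:
        # builds.json changed in S3 while pruning ran; merge policy-cleanup markers first.
        return "merge-then-upload"
    return "upload"

source = {"builds": [{"id": "41.20240101.3.0"}]}
local = {"builds": [{"id": "41.20240101.3.0", "policy-cleanup": ["cloud-uploads"]}]}
remote = {"builds": [{"id": "41.20240108.3.0"}, {"id": "41.20240101.3.0"}]}

print(decide_upload(source, local, remote))   # merge-then-upload
print(decide_upload(source, source, remote))  # skip
```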
@@ -232,6 +243,7 @@ def update_policy_cleanup(current_builds, remote_builds):
             current_build = current_builds_dict[build_id]
             if 'policy-cleanup' in current_build:
                 remote_build['policy-cleanup'] = current_build['policy-cleanup']
+    return remote_builds
 
 
 def prune_cloud_uploads(build, cloud_config, dry_run):
@@ -257,7 +269,7 @@ def deregister_aws_amis(build, cloud_config, dry_run):
         if dry_run:
             print(f"Would delete {ami_id} and {snapshot_id} for {build.id}")
             continue
-        if ami_id and snapshot_id and region_name:
+        if (ami_id or snapshot_id) and region_name:
             try:
                 deregister_aws_resource(ami_id, snapshot_id, region=region_name, credentials_file=aws_credentials)
             except Exception as e:
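With the old `and` chain, a record that carried only a snapshot id (or only an AMI id) never reached deregister_aws_resource. A tiny sketch of the loosened check, with invented ids:

```python
# Invented ids; only the boolean logic mirrors the change above.
def should_deregister(ami_id, snapshot_id, region_name):
    return bool((ami_id or snapshot_id) and region_name)

print(should_deregister("ami-0abc1234", None, "us-east-1"))   # True: AMI only
print(should_deregister(None, "snap-0def5678", "us-east-1"))  # True: snapshot only
print(should_deregister(None, None, "us-east-1"))             # False: nothing to remove
```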
@@ -274,8 +286,8 @@ def delete_gcp_image(build, cloud_config, dry_run):
         print(f"No GCP image for {build.id} for {build.arch}")
         return errors
     gcp_image = gcp.get("image")
+    project = gcp.get("project")
     json_key = cloud_config.get("gcp", {}).get("json-key")
-    project = cloud_config.get("gcp", {}).get("project")
     if dry_run:
         print(f"Would delete {gcp_image} GCP image for {build.id}")
     elif gcp_image and json_key and project:
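With --gcp-project removed, the project is now read from the build's own gcp metadata instead of a CLI flag. A rough sketch of the new lookup, using a trimmed, invented example of a build's gcp section and cloud config:

```python
# Invented sample data; only the lookup order mirrors the change above.
gcp = {"image": "fedora-coreos-41-gcp-x86-64", "project": "some-gcp-project"}
cloud_config = {"gcp": {"json-key": "/path/to/service-account.json"}}

gcp_image = gcp.get("image")
project = gcp.get("project")                              # previously args.gcp_project
json_key = cloud_config.get("gcp", {}).get("json-key")

if gcp_image and json_key and project:
    print(f"Would remove image {gcp_image} from project {project}")
```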