
Commit 654dd77

fix: batch delete objects with a batch size of 1000 elements due to s3 api limit (#179)
1 parent: 629eea8

File tree

1 file changed: +9 -5 lines


webapp_deploy/main.py

Lines changed: 9 additions & 5 deletions
@@ -80,11 +80,15 @@ def cleanup_delete_files(s3_target, files):
         logger.info("Deleting %s" % filename)
         objects.append({"Key": os.path.join(s3_target_key, filename)})
 
-    result = s3_client.delete_objects(
-        Bucket=s3_target_bucket, Delete={"Objects": objects}
-    )
-    if "Errors" in result and len(result["Errors"]) > 0:
-        logger.warn("Errors during delete: %s" % result["Errors"])
+    # S3 delete_objects can only delete 1000 objects at a time
+    batch_size = 1000
+    for i in range(0, len(objects), batch_size):
+        batch = objects[i : i + batch_size]
+        result = s3_client.delete_objects(
+            Bucket=s3_target_bucket, Delete={"Objects": batch}
+        )
+        if "Errors" in result and len(result["Errors"]) > 0:
+            logger.warn("Errors during delete: %s" % result["Errors"])
 
 
 def cleanup(items, s3_target, expiry):
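
For context, a minimal standalone sketch of the same batching pattern. The delete_keys_in_batches helper and the bucket/key names are hypothetical and not part of webapp_deploy; only boto3's documented delete_objects call and its 1000-keys-per-request limit are assumed.

import boto3


def delete_keys_in_batches(bucket, keys, batch_size=1000):
    """Delete the given keys from an S3 bucket, at most 1000 per API call."""
    s3_client = boto3.client("s3")
    objects = [{"Key": key} for key in keys]
    for i in range(0, len(objects), batch_size):
        batch = objects[i : i + batch_size]
        result = s3_client.delete_objects(
            Bucket=bucket, Delete={"Objects": batch}
        )
        # delete_objects reports per-key failures in an optional "Errors" list
        if result.get("Errors"):
            print("Errors during delete: %s" % result["Errors"])


# Illustrative usage (bucket and keys are placeholders):
# delete_keys_in_batches("my-bucket", ["site/index.html", "site/app.js"])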
