Commit be08c20

Author: matthias_schaub (committed)
Use the same logic and structure as delete_project for handling the max write limit during Firebase interaction.
Parent: a5035c3
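For context, the pattern this commit adopts from delete_project can be sketched as a standalone helper (a hypothetical delete_in_chunks, shown for illustration only; the diff below inlines the same logic per reference): attempt a single delete(), and if Firebase rejects the request for exceeding its maximum write size, clear the children in fixed-size batches first.

# Hypothetical helper illustrating the pattern; not part of the actual diff.
from firebase_admin import db, exceptions


def delete_in_chunks(ref: db.Reference, size: int = 250) -> None:
    try:
        # Fast path: remove the whole subtree in one request.
        ref.delete()
    except exceptions.InvalidArgumentError:
        # The payload exceeds Firebase's per-request write limit.
        # A shallow get() fetches only the child keys, not their data.
        keys = list(ref.get(shallow=True).keys())
        for i in range(0, len(keys), size):
            # Setting a key to None inside update() deletes that child.
            ref.update({key: None for key in keys[i : i + size]})
        # A final delete() clears the now-small remainder and the node itself.
        ref.delete()

The batch size of 250 matches the chunk_size the previous implementation used.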

File tree: 1 file changed (+33, -33 lines)


mapswipe_workers/mapswipe_workers/firebase_to_postgres/archive_project.py

Lines changed: 33 additions & 33 deletions
@@ -2,10 +2,18 @@
 Archive a project.
 """
 
+from firebase_admin import exceptions
+
 from mapswipe_workers import auth
 from mapswipe_workers.definitions import logger
 
 
+def chunks(data, size=250):
+    """Yield successive n-sized chunks from list."""
+    for i in range(0, len(data), size):
+        yield data[i : i + size]
+
+
 def archive_project(project_ids: list) -> None:
     """
     Archive a project.
@@ -15,46 +23,38 @@ def archive_project(project_ids: list) -> None:
     """
     for project_id in project_ids:
         logger.info(f"Archive project with the id {project_id}")
-        logger.info(f"Delete results of project with the id {project_id}")
 
         fb_db = auth.firebaseDB()
-        fb_db.reference(f"v2/results/{project_id}").set({})
-
-        # get group keys for this project to estimate size in firebase
-        groups = fb_db.reference(f"v2/groups/{project_id}").get(shallow=True)
-
-        if not groups:
-            logger.info("no groups to delete in firebase")
-        else:
-            group_keys = list(groups.keys())
-            chunk_size = 250
-            chunks = int(len(group_keys) / chunk_size) + 1
-
-            # delete groups, tasks in firebase for each chunk using the update function
-            for i in range(0, chunks):
-                logger.info(
-                    f"Delete max {chunk_size} groups and tasks"
-                    f"of project with the id {project_id}"
-                )
-                update_dict = {}
-                for group_id in group_keys[:chunk_size]:
-                    update_dict[group_id] = None
-                fb_db.reference(f"v2/groups/{project_id}").update(update_dict)
-                fb_db.reference(f"v2/tasks/{project_id}").update(update_dict)
-                group_keys = group_keys[chunk_size:]
-
-        logger.info(
-            f"Set status=archived in Firebase for project with the id {project_id}"
-        )
-        fb_db = auth.firebaseDB()
+        ref = fb_db.reference(f"v2/results/{project_id}")
+        try:
+            ref.delete()
+        except exceptions.InvalidArgumentError:
+            # Data to write exceeds the maximum size that can be modified
+            # with a single request. Delete chunks of data instead.
+            childs = ref.get(shallow=True)
+            for chunk in chunks(list(childs.keys())):
+                ref.update({key: None for key in chunk})
+            ref.delete()
+
+        ref = fb_db.reference(f"v2/tasks/{project_id}")
+        try:
+            ref.delete()
+        except exceptions.InvalidArgumentError:
+            # Data to write exceeds the maximum size that can be modified
+            # with a single request. Delete chunks of data instead.
+            childs = ref.get(shallow=True)
+            for chunk in chunks(list(childs.keys())):
+                ref.update({key: None for key in chunk})
+            ref.delete()
+
+        fb_db.reference(f"v2/groups/{project_id}").delete()
         fb_db.reference(f"v2/projects/{project_id}/status").set("archived")
 
-        logger.info(
-            f"Set status=archived in Postgres for project with the id {project_id}"
-        )
         pg_db = auth.postgresDB()
         sql_query = """
             UPDATE projects SET status = 'archived'
             WHERE project_id = %(project_id)s;
         """
         pg_db.query(sql_query, {"project_id": project_id})
+
+    return True
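As a quick illustration (not part of the commit), the new chunks() generator yields fixed-size slices of a key list, with a shorter final slice for the remainder; the import path below assumes the package layout implied by the file path above:

>>> from mapswipe_workers.firebase_to_postgres.archive_project import chunks
>>> list(chunks(["g0", "g1", "g2", "g3", "g4"], size=2))
[['g0', 'g1'], ['g2', 'g3'], ['g4']]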
